From c7c1dbeb65122e68e193cdbd8f45813ecdc55c82 Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Fri, 30 Mar 2012 21:38:44 +0000
Subject: [PATCH] HBASE-5443 Create PB protocols for HRegionInterface

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1307625 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hbase/mapreduce/HRegionPartitioner.java   |     3 +-
 .../hbase/protobuf/generated/HBaseProtos.java |  3190 +++
 .../protobuf/generated/RegionAdminProtos.java | 15454 +++++++++++
 .../generated/RegionClientProtos.java         | 21773 ++++++++++++++++
 .../apache/hadoop/hbase/protobuf/package.html |    30 +
 src/main/protobuf/README.txt                  |    27 +
 src/main/protobuf/RegionAdmin.proto           |   236 +
 src/main/protobuf/RegionClient.proto          |   372 +
 src/main/protobuf/hbase.proto                 |   103 +
 9 files changed, 41187 insertions(+), 1 deletion(-)
 create mode 100644 src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
 create mode 100644 src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionAdminProtos.java
 create mode 100644 src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionClientProtos.java
 create mode 100644 src/main/java/org/apache/hadoop/hbase/protobuf/package.html
 create mode 100644 src/main/protobuf/README.txt
 create mode 100644 src/main/protobuf/RegionAdmin.proto
 create mode 100644 src/main/protobuf/RegionClient.proto
 create mode 100644 src/main/protobuf/hbase.proto

diff --git a/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java b/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
index 8401c98a7a4..f0383c1cb0a 100644
--- a/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
+++ b/src/main/java/org/apache/hadoop/hbase/mapreduce/HRegionPartitioner.java
@@ -120,8 +120,9 @@ implements Configurable {
    */
   @Override
   public void setConf(Configuration configuration) {
-    this.conf = HBaseConfiguration.create(configuration);
+    this.conf = configuration; //HBaseConfiguration.create(configuration);
     try {
+      HBaseConfiguration.addHbaseResources(conf);
       this.table = new HTable(this.conf,
         configuration.get(TableOutputFormat.OUTPUT_TABLE));
     } catch (IOException e) {
diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
new file mode 100644
index 00000000000..4026da04f99
--- /dev/null
+++ b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java
@@ -0,0 +1,3190 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
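[Editor's aside, not part of the patch] The HRegionPartitioner.setConf() hunk above stops copying the caller's Configuration with HBaseConfiguration.create() and instead reuses it, merging the HBase resource files into the same object. A minimal sketch of that pattern, assuming the standard Hadoop/HBase configuration APIs; the wrapper class and method names are hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class ConfigReuseSketch {
  // Old pattern: return HBaseConfiguration.create(callerConf); -- builds a fresh copy.
  // New pattern: keep the caller's Configuration and merge hbase-default.xml /
  // hbase-site.xml into it in place via addHbaseResources().
  public static Configuration withHbaseResources(Configuration callerConf) {
    HBaseConfiguration.addHbaseResources(callerConf);
    return callerConf;
  }
}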
+// source: hbase.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class HBaseProtos { + private HBaseProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public enum KeyType + implements com.google.protobuf.ProtocolMessageEnum { + MINIMUM(0, 0), + PUT(1, 4), + DELETE(2, 8), + DELETE_COLUMN(3, 12), + DELETE_FAMILY(4, 14), + MAXIMUM(5, 255), + ; + + public static final int MINIMUM_VALUE = 0; + public static final int PUT_VALUE = 4; + public static final int DELETE_VALUE = 8; + public static final int DELETE_COLUMN_VALUE = 12; + public static final int DELETE_FAMILY_VALUE = 14; + public static final int MAXIMUM_VALUE = 255; + + + public final int getNumber() { return value; } + + public static KeyType valueOf(int value) { + switch (value) { + case 0: return MINIMUM; + case 4: return PUT; + case 8: return DELETE; + case 12: return DELETE_COLUMN; + case 14: return DELETE_FAMILY; + case 255: return MAXIMUM; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public KeyType findValueByNumber(int number) { + return KeyType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor().getEnumTypes().get(0); + } + + private static final KeyType[] VALUES = { + MINIMUM, PUT, DELETE, DELETE_COLUMN, DELETE_FAMILY, MAXIMUM, + }; + + public static KeyType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private KeyType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:KeyType) + } + + public interface RegionInfoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 regionId = 1; + boolean hasRegionId(); + long getRegionId(); + + // required bytes tableName = 2; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // optional bytes startKey = 3; + boolean hasStartKey(); + com.google.protobuf.ByteString getStartKey(); + + // optional bytes endKey = 4; + boolean hasEndKey(); + com.google.protobuf.ByteString getEndKey(); + + // optional bool offline = 5; + boolean hasOffline(); + boolean getOffline(); + + // optional bool split = 6; + boolean hasSplit(); + boolean getSplit(); + } + public static final class RegionInfo extends + com.google.protobuf.GeneratedMessage + implements RegionInfoOrBuilder { + // Use RegionInfo.newBuilder() to construct. 
+ private RegionInfo(Builder builder) { + super(builder); + } + private RegionInfo(boolean noInit) {} + + private static final RegionInfo defaultInstance; + public static RegionInfo getDefaultInstance() { + return defaultInstance; + } + + public RegionInfo getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 regionId = 1; + public static final int REGIONID_FIELD_NUMBER = 1; + private long regionId_; + public boolean hasRegionId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getRegionId() { + return regionId_; + } + + // required bytes tableName = 2; + public static final int TABLENAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // optional bytes startKey = 3; + public static final int STARTKEY_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString startKey_; + public boolean hasStartKey() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getStartKey() { + return startKey_; + } + + // optional bytes endKey = 4; + public static final int ENDKEY_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString endKey_; + public boolean hasEndKey() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public com.google.protobuf.ByteString getEndKey() { + return endKey_; + } + + // optional bool offline = 5; + public static final int OFFLINE_FIELD_NUMBER = 5; + private boolean offline_; + public boolean hasOffline() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getOffline() { + return offline_; + } + + // optional bool split = 6; + public static final int SPLIT_FIELD_NUMBER = 6; + private boolean split_; + public boolean hasSplit() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getSplit() { + return split_; + } + + private void initFields() { + regionId_ = 0L; + tableName_ = com.google.protobuf.ByteString.EMPTY; + startKey_ = com.google.protobuf.ByteString.EMPTY; + endKey_ = com.google.protobuf.ByteString.EMPTY; + offline_ = false; + split_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegionId()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, regionId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, startKey_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, endKey_); + } 
+ if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(5, offline_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(6, split_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, regionId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, startKey_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, endKey_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(5, offline_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(6, split_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) obj; + + boolean result = true; + result = result && (hasRegionId() == other.hasRegionId()); + if (hasRegionId()) { + result = result && (getRegionId() + == other.getRegionId()); + } + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasStartKey() == other.hasStartKey()); + if (hasStartKey()) { + result = result && getStartKey() + .equals(other.getStartKey()); + } + result = result && (hasEndKey() == other.hasEndKey()); + if (hasEndKey()) { + result = result && getEndKey() + .equals(other.getEndKey()); + } + result = result && (hasOffline() == other.hasOffline()); + if (hasOffline()) { + result = result && (getOffline() + == other.getOffline()); + } + result = result && (hasSplit() == other.hasSplit()); + if (hasSplit()) { + result = result && (getSplit() + == other.getSplit()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegionId()) { + hash = (37 * hash) + REGIONID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getRegionId()); + } + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasStartKey()) { + hash = (37 * hash) + STARTKEY_FIELD_NUMBER; + hash = (53 * hash) + getStartKey().hashCode(); + } + if (hasEndKey()) { + hash = (37 * hash) + ENDKEY_FIELD_NUMBER; + hash = (53 * hash) + getEndKey().hashCode(); + } + if (hasOffline()) { + hash = (37 * 
hash) + OFFLINE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getOffline()); + } + if (hasSplit()) { + hash = (37 * hash) + SPLIT_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getSplit()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionInfo_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + regionId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + startKey_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + endKey_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + offline_ = false; + bitField0_ = (bitField0_ & ~0x00000010); + split_ = false; + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.regionId_ = regionId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.startKey_ = startKey_; + if 
(((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.endKey_ = endKey_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.offline_ = offline_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.split_ = split_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) return this; + if (other.hasRegionId()) { + setRegionId(other.getRegionId()); + } + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasStartKey()) { + setStartKey(other.getStartKey()); + } + if (other.hasEndKey()) { + setEndKey(other.getEndKey()); + } + if (other.hasOffline()) { + setOffline(other.getOffline()); + } + if (other.hasSplit()) { + setSplit(other.getSplit()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegionId()) { + + return false; + } + if (!hasTableName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + regionId_ = input.readUInt64(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + tableName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + startKey_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + endKey_ = input.readBytes(); + break; + } + case 40: { + bitField0_ |= 0x00000010; + offline_ = input.readBool(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + split_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 regionId = 1; + private long regionId_ ; + public boolean hasRegionId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getRegionId() { + return regionId_; + } + public Builder setRegionId(long value) { + bitField0_ |= 0x00000001; + regionId_ = value; + onChanged(); + return this; + } + public Builder clearRegionId() { + bitField0_ = (bitField0_ & ~0x00000001); + regionId_ = 0L; + onChanged(); + return this; + } + + // required bytes tableName = 2; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public 
Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000002); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // optional bytes startKey = 3; + private com.google.protobuf.ByteString startKey_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasStartKey() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getStartKey() { + return startKey_; + } + public Builder setStartKey(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + startKey_ = value; + onChanged(); + return this; + } + public Builder clearStartKey() { + bitField0_ = (bitField0_ & ~0x00000004); + startKey_ = getDefaultInstance().getStartKey(); + onChanged(); + return this; + } + + // optional bytes endKey = 4; + private com.google.protobuf.ByteString endKey_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasEndKey() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public com.google.protobuf.ByteString getEndKey() { + return endKey_; + } + public Builder setEndKey(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + endKey_ = value; + onChanged(); + return this; + } + public Builder clearEndKey() { + bitField0_ = (bitField0_ & ~0x00000008); + endKey_ = getDefaultInstance().getEndKey(); + onChanged(); + return this; + } + + // optional bool offline = 5; + private boolean offline_ ; + public boolean hasOffline() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getOffline() { + return offline_; + } + public Builder setOffline(boolean value) { + bitField0_ |= 0x00000010; + offline_ = value; + onChanged(); + return this; + } + public Builder clearOffline() { + bitField0_ = (bitField0_ & ~0x00000010); + offline_ = false; + onChanged(); + return this; + } + + // optional bool split = 6; + private boolean split_ ; + public boolean hasSplit() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getSplit() { + return split_; + } + public Builder setSplit(boolean value) { + bitField0_ |= 0x00000020; + split_ = value; + onChanged(); + return this; + } + public Builder clearSplit() { + bitField0_ = (bitField0_ & ~0x00000020); + split_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RegionInfo) + } + + static { + defaultInstance = new RegionInfo(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionInfo) + } + + public interface RegionSpecifierOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier.RegionSpecifierType type = 1; + boolean hasType(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType(); + + // required bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class RegionSpecifier extends + com.google.protobuf.GeneratedMessage + implements RegionSpecifierOrBuilder { + // Use RegionSpecifier.newBuilder() to construct. 
+ private RegionSpecifier(Builder builder) { + super(builder); + } + private RegionSpecifier(boolean noInit) {} + + private static final RegionSpecifier defaultInstance; + public static RegionSpecifier getDefaultInstance() { + return defaultInstance; + } + + public RegionSpecifier getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable; + } + + public enum RegionSpecifierType + implements com.google.protobuf.ProtocolMessageEnum { + REGION_NAME(0, 1), + ENCODED_REGION_NAME(1, 2), + ; + + public static final int REGION_NAME_VALUE = 1; + public static final int ENCODED_REGION_NAME_VALUE = 2; + + + public final int getNumber() { return value; } + + public static RegionSpecifierType valueOf(int value) { + switch (value) { + case 1: return REGION_NAME; + case 2: return ENCODED_REGION_NAME; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public RegionSpecifierType findValueByNumber(int number) { + return RegionSpecifierType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor().getEnumTypes().get(0); + } + + private static final RegionSpecifierType[] VALUES = { + REGION_NAME, ENCODED_REGION_NAME, + }; + + public static RegionSpecifierType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private RegionSpecifierType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:RegionSpecifier.RegionSpecifierType) + } + + private int bitField0_; + // required .RegionSpecifier.RegionSpecifierType type = 1; + public static final int TYPE_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_; + public boolean hasType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { + return type_; + } + + // required bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private void initFields() { + type_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasType()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeEnum(1, type_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(1, type_.getNumber()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) obj; + + boolean result = true; + result = result && (hasType() == other.hasType()); + if (hasType()) { + result = result && + (getType() == other.getType()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasType()) { + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getType()); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(byte[] data) + 
throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_RegionSpecifier_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent 
parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.type_ = type_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) return this; + if (other.hasType()) { + setType(other.getType()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasType()) { + + return false; + } + if (!hasValue()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + 
this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + bitField0_ |= 0x00000001; + type_ = value; + } + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier.RegionSpecifierType type = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + public boolean hasType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType getType() { + return type_; + } + public Builder setType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + type_ = value; + onChanged(); + return this; + } + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000001); + type_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME; + onChanged(); + return this; + } + + // required bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RegionSpecifier) + } + + static { + defaultInstance = new RegionSpecifier(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RegionSpecifier) + } + + public interface TimeRangeOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional uint64 from = 1; + boolean hasFrom(); + long getFrom(); + + // optional uint64 to = 2; + boolean hasTo(); + long getTo(); + } + public static final class TimeRange extends + com.google.protobuf.GeneratedMessage + implements TimeRangeOrBuilder { + // Use TimeRange.newBuilder() to construct. 
+ private TimeRange(Builder builder) { + super(builder); + } + private TimeRange(boolean noInit) {} + + private static final TimeRange defaultInstance; + public static TimeRange getDefaultInstance() { + return defaultInstance; + } + + public TimeRange getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable; + } + + private int bitField0_; + // optional uint64 from = 1; + public static final int FROM_FIELD_NUMBER = 1; + private long from_; + public boolean hasFrom() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getFrom() { + return from_; + } + + // optional uint64 to = 2; + public static final int TO_FIELD_NUMBER = 2; + private long to_; + public boolean hasTo() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getTo() { + return to_; + } + + private void initFields() { + from_ = 0L; + to_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, from_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, to_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, from_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, to_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) obj; + + boolean result = true; + result = result && (hasFrom() == other.hasFrom()); + if (hasFrom()) { + result = result && (getFrom() + == other.getFrom()); + } + result = result && (hasTo() == other.hasTo()); + if (hasTo()) { + result = result && (getTo() + == other.getTo()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFrom()) { + hash = (37 * hash) + FROM_FIELD_NUMBER; + hash = (53 * hash) + 
hashLong(getFrom()); + } + if (hasTo()) { + hash = (37 * hash) + TO_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTo()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new 
Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_TimeRange_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + from_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + to_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.from_ = from_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.to_ = to_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) return this; + if 
(other.hasFrom()) { + setFrom(other.getFrom()); + } + if (other.hasTo()) { + setTo(other.getTo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + from_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + to_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // optional uint64 from = 1; + private long from_ ; + public boolean hasFrom() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getFrom() { + return from_; + } + public Builder setFrom(long value) { + bitField0_ |= 0x00000001; + from_ = value; + onChanged(); + return this; + } + public Builder clearFrom() { + bitField0_ = (bitField0_ & ~0x00000001); + from_ = 0L; + onChanged(); + return this; + } + + // optional uint64 to = 2; + private long to_ ; + public boolean hasTo() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getTo() { + return to_; + } + public Builder setTo(long value) { + bitField0_ |= 0x00000002; + to_ = value; + onChanged(); + return this; + } + public Builder clearTo() { + bitField0_ = (bitField0_ & ~0x00000002); + to_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:TimeRange) + } + + static { + defaultInstance = new TimeRange(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:TimeRange) + } + + public interface KeyValueOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required bytes family = 2; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required bytes qualifier = 3; + boolean hasQualifier(); + com.google.protobuf.ByteString getQualifier(); + + // optional uint64 timestamp = 4; + boolean hasTimestamp(); + long getTimestamp(); + + // optional .KeyType keyType = 5; + boolean hasKeyType(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType getKeyType(); + + // optional bytes value = 6; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class KeyValue extends + com.google.protobuf.GeneratedMessage + implements KeyValueOrBuilder { + // Use KeyValue.newBuilder() to construct. 
+ private KeyValue(Builder builder) { + super(builder); + } + private KeyValue(boolean noInit) {} + + private static final KeyValue defaultInstance; + public static KeyValue getDefaultInstance() { + return defaultInstance; + } + + public KeyValue getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required bytes family = 2; + public static final int FAMILY_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required bytes qualifier = 3; + public static final int QUALIFIER_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString qualifier_; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + // optional uint64 timestamp = 4; + public static final int TIMESTAMP_FIELD_NUMBER = 4; + private long timestamp_; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getTimestamp() { + return timestamp_; + } + + // optional .KeyType keyType = 5; + public static final int KEYTYPE_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType keyType_; + public boolean hasKeyType() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType getKeyType() { + return keyType_; + } + + // optional bytes value = 6; + public static final int VALUE_FIELD_NUMBER = 6; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + timestamp_ = 0L; + keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasQualifier()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, 
row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, qualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(4, timestamp_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeEnum(5, keyType_.getNumber()); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBytes(6, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, qualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, timestamp_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(5, keyType_.getNumber()); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(6, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && (hasKeyType() == other.hasKeyType()); + if (hasKeyType()) { + result = result && + (getKeyType() == other.getKeyType()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if 
(hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + if (hasTimestamp()) { + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTimestamp()); + } + if (hasKeyType()) { + hash = (37 * hash) + KEYTYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getKeyType()); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_KeyValue_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; + bitField0_ = (bitField0_ & ~0x00000010); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + 
to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.qualifier_ = qualifier_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.timestamp_ = timestamp_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.keyType_ = keyType_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + if (other.hasKeyType()) { + setKeyType(other.getKeyType()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasFamily()) { + + return false; + } + if (!hasQualifier()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + family_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + qualifier_ = input.readBytes(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + timestamp_ = input.readUInt64(); + break; + } + case 40: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(5, rawValue); + } else { + bitField0_ |= 0x00000010; + keyType_ = value; + } + break; + } + case 50: { + bitField0_ |= 0x00000020; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) 
== 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required bytes family = 2; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000002); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required bytes qualifier = 3; + private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + qualifier_ = value; + onChanged(); + return this; + } + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000004); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // optional uint64 timestamp = 4; + private long timestamp_ ; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getTimestamp() { + return timestamp_; + } + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000008; + timestamp_ = value; + onChanged(); + return this; + } + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000008); + timestamp_ = 0L; + onChanged(); + return this; + } + + // optional .KeyType keyType = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; + public boolean hasKeyType() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType getKeyType() { + return keyType_; + } + public Builder setKeyType(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + keyType_ = value; + onChanged(); + return this; + } + public Builder clearKeyType() { + bitField0_ = (bitField0_ & ~0x00000010); + keyType_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyType.MINIMUM; + onChanged(); + return this; + } + + // optional bytes value = 6; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + value_ = value; + onChanged(); 
+ return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000020); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:KeyValue) + } + + static { + defaultInstance = new KeyValue(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:KeyValue) + } + + public interface ServerNameOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string hostName = 1; + boolean hasHostName(); + String getHostName(); + + // optional uint32 port = 2; + boolean hasPort(); + int getPort(); + + // optional uint64 startCode = 3; + boolean hasStartCode(); + long getStartCode(); + } + public static final class ServerName extends + com.google.protobuf.GeneratedMessage + implements ServerNameOrBuilder { + // Use ServerName.newBuilder() to construct. + private ServerName(Builder builder) { + super(builder); + } + private ServerName(boolean noInit) {} + + private static final ServerName defaultInstance; + public static ServerName getDefaultInstance() { + return defaultInstance; + } + + public ServerName getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable; + } + + private int bitField0_; + // required string hostName = 1; + public static final int HOSTNAME_FIELD_NUMBER = 1; + private java.lang.Object hostName_; + public boolean hasHostName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getHostName() { + java.lang.Object ref = hostName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + hostName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getHostNameBytes() { + java.lang.Object ref = hostName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + hostName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional uint32 port = 2; + public static final int PORT_FIELD_NUMBER = 2; + private int port_; + public boolean hasPort() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getPort() { + return port_; + } + + // optional uint64 startCode = 3; + public static final int STARTCODE_FIELD_NUMBER = 3; + private long startCode_; + public boolean hasStartCode() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getStartCode() { + return startCode_; + } + + private void initFields() { + hostName_ = ""; + port_ = 0; + startCode_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasHostName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ 
& 0x00000001) == 0x00000001)) { + output.writeBytes(1, getHostNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, port_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, startCode_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getHostNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, port_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, startCode_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) obj; + + boolean result = true; + result = result && (hasHostName() == other.hasHostName()); + if (hasHostName()) { + result = result && getHostName() + .equals(other.getHostName()); + } + result = result && (hasPort() == other.hasPort()); + if (hasPort()) { + result = result && (getPort() + == other.getPort()); + } + result = result && (hasStartCode() == other.hasStartCode()); + if (hasStartCode()) { + result = result && (getStartCode() + == other.getStartCode()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasHostName()) { + hash = (37 * hash) + HOSTNAME_FIELD_NUMBER; + hash = (53 * hash) + getHostName().hashCode(); + } + if (hasPort()) { + hash = (37 * hash) + PORT_FIELD_NUMBER; + hash = (53 * hash) + getPort(); + } + if (hasStartCode()) { + hash = (37 * hash) + STARTCODE_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getStartCode()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_ServerName_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } 
+ } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + hostName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + port_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + startCode_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.hostName_ = hostName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.port_ = port_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.startCode_ = startCode_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) return this; + if (other.hasHostName()) { + setHostName(other.getHostName()); + } + if (other.hasPort()) { + setPort(other.getPort()); + } + if (other.hasStartCode()) { + setStartCode(other.getStartCode()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasHostName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); 
+ return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + hostName_ = input.readBytes(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + port_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + startCode_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required string hostName = 1; + private java.lang.Object hostName_ = ""; + public boolean hasHostName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getHostName() { + java.lang.Object ref = hostName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + hostName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setHostName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + hostName_ = value; + onChanged(); + return this; + } + public Builder clearHostName() { + bitField0_ = (bitField0_ & ~0x00000001); + hostName_ = getDefaultInstance().getHostName(); + onChanged(); + return this; + } + void setHostName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + hostName_ = value; + onChanged(); + } + + // optional uint32 port = 2; + private int port_ ; + public boolean hasPort() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getPort() { + return port_; + } + public Builder setPort(int value) { + bitField0_ |= 0x00000002; + port_ = value; + onChanged(); + return this; + } + public Builder clearPort() { + bitField0_ = (bitField0_ & ~0x00000002); + port_ = 0; + onChanged(); + return this; + } + + // optional uint64 startCode = 3; + private long startCode_ ; + public boolean hasStartCode() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getStartCode() { + return startCode_; + } + public Builder setStartCode(long value) { + bitField0_ |= 0x00000004; + startCode_ = value; + onChanged(); + return this; + } + public Builder clearStartCode() { + bitField0_ = (bitField0_ & ~0x00000004); + startCode_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ServerName) + } + + static { + defaultInstance = new ServerName(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ServerName) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionInfo_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionInfo_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RegionSpecifier_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RegionSpecifier_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_TimeRange_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_TimeRange_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_KeyValue_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_KeyValue_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ServerName_descriptor; + private static + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ServerName_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\013hbase.proto\"s\n\nRegionInfo\022\020\n\010regionId\030" + + "\001 \002(\004\022\021\n\ttableName\030\002 \002(\014\022\020\n\010startKey\030\003 \001" + + "(\014\022\016\n\006endKey\030\004 \001(\014\022\017\n\007offline\030\005 \001(\010\022\r\n\005s" + + "plit\030\006 \001(\010\"\225\001\n\017RegionSpecifier\0222\n\004type\030\001" + + " \002(\0162$.RegionSpecifier.RegionSpecifierTy" + + "pe\022\r\n\005value\030\002 \002(\014\"?\n\023RegionSpecifierType" + + "\022\017\n\013REGION_NAME\020\001\022\027\n\023ENCODED_REGION_NAME" + + "\020\002\"%\n\tTimeRange\022\014\n\004from\030\001 \001(\004\022\n\n\002to\030\002 \001(" + + "\004\"w\n\010KeyValue\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002 \002" + + "(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttimestamp\030\004 \001(\004", + "\022\031\n\007keyType\030\005 \001(\0162\010.KeyType\022\r\n\005value\030\006 \001" + + "(\014\"?\n\nServerName\022\020\n\010hostName\030\001 \002(\t\022\014\n\004po" + + "rt\030\002 \001(\r\022\021\n\tstartCode\030\003 \001(\004*_\n\007KeyType\022\013" + + "\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELE" + + "TE_COLUMN\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMU" + + "M\020\377\001B>\n*org.apache.hadoop.hbase.protobuf" + + ".generatedB\013HBaseProtosH\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_RegionInfo_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_RegionInfo_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionInfo_descriptor, + new java.lang.String[] { "RegionId", "TableName", "StartKey", "EndKey", "Offline", "Split", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder.class); + internal_static_RegionSpecifier_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_RegionSpecifier_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RegionSpecifier_descriptor, + new java.lang.String[] { "Type", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder.class); + internal_static_TimeRange_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_TimeRange_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_TimeRange_descriptor, + new java.lang.String[] { "From", "To", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder.class); + internal_static_KeyValue_descriptor = + getDescriptor().getMessageTypes().get(3); + 
internal_static_KeyValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_KeyValue_descriptor, + new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "KeyType", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder.class); + internal_static_ServerName_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_ServerName_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ServerName_descriptor, + new java.lang.String[] { "HostName", "Port", "StartCode", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionAdminProtos.java b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionAdminProtos.java new file mode 100644 index 00000000000..216931094ff --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionAdminProtos.java @@ -0,0 +1,15454 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: RegionAdmin.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class RegionAdminProtos { + private RegionAdminProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface GetRegionInfoRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + } + public static final class GetRegionInfoRequest extends + com.google.protobuf.GeneratedMessage + implements GetRegionInfoRequestOrBuilder { + // Use GetRegionInfoRequest.newBuilder() to construct. 
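  // A minimal sketch of building the GetRegionInfoRequest defined here. Only setRegion and
  // the REGION_NAME specifier type appear explicitly in this patch; the setType/setValue
  // setters on RegionSpecifier.Builder are assumed from the usual protoc builder pattern,
  // and the region name value is illustrative:
  //
  //   HBaseProtos.RegionSpecifier region = HBaseProtos.RegionSpecifier.newBuilder()
  //       .setType(HBaseProtos.RegionSpecifier.RegionSpecifierType.REGION_NAME)
  //       .setValue(com.google.protobuf.ByteString.copyFromUtf8("someRegionName"))
  //       .build();
  //   RegionAdminProtos.GetRegionInfoRequest request =
  //       RegionAdminProtos.GetRegionInfoRequest.newBuilder()
  //           .setRegion(region)
  //           .build();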
+ private GetRegionInfoRequest(Builder builder) { + super(builder); + } + private GetRegionInfoRequest(boolean noInit) {} + + private static final GetRegionInfoRequest defaultInstance; + public static GetRegionInfoRequest getDefaultInstance() { + return defaultInstance; + } + + public GetRegionInfoRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + 
getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + 
public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + 
return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + 
if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetRegionInfoRequest) + } + + static { + defaultInstance = new GetRegionInfoRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetRegionInfoRequest) + } + + public interface GetRegionInfoResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionInfo regionInfo = 1; + boolean hasRegionInfo(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); + } + public static final class GetRegionInfoResponse extends + com.google.protobuf.GeneratedMessage + implements GetRegionInfoResponseOrBuilder { + // Use GetRegionInfoResponse.newBuilder() to construct. 
+ private GetRegionInfoResponse(Builder builder) { + super(builder); + } + private GetRegionInfoResponse(boolean noInit) {} + + private static final GetRegionInfoResponse defaultInstance; + public static GetRegionInfoResponse getDefaultInstance() { + return defaultInstance; + } + + public GetRegionInfoResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionInfo regionInfo = 1; + public static final int REGIONINFO_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; + public boolean hasRegionInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { + return regionInfo_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { + return regionInfo_; + } + + private void initFields() { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegionInfo()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegionInfo().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, regionInfo_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, regionInfo_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse) obj; + + boolean result = true; + result = result && (hasRegionInfo() == other.hasRegionInfo()); + if (hasRegionInfo()) { + result = result && getRegionInfo() + .equals(other.getRegionInfo()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int 
hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegionInfo()) { + hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; + hash = (53 * hash) + getRegionInfo().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionInfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionInfoBuilder_ == null) { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + } else { + regionInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 
0x00000001; + } + if (regionInfoBuilder_ == null) { + result.regionInfo_ = regionInfo_; + } else { + result.regionInfo_ = regionInfoBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this; + if (other.hasRegionInfo()) { + mergeRegionInfo(other.getRegionInfo()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegionInfo()) { + + return false; + } + if (!getRegionInfo().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); + if (hasRegionInfo()) { + subBuilder.mergeFrom(getRegionInfo()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegionInfo(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionInfo regionInfo = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; + public boolean hasRegionInfo() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { + if (regionInfoBuilder_ == null) { + return regionInfo_; + } else { + return regionInfoBuilder_.getMessage(); + } + } + public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + regionInfo_ = value; + onChanged(); + } else { + regionInfoBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegionInfo( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + regionInfo_ = 
builderForValue.build(); + onChanged(); + } else { + regionInfoBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { + regionInfo_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); + } else { + regionInfo_ = value; + } + onChanged(); + } else { + regionInfoBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegionInfo() { + if (regionInfoBuilder_ == null) { + regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + onChanged(); + } else { + regionInfoBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionInfoFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { + if (regionInfoBuilder_ != null) { + return regionInfoBuilder_.getMessageOrBuilder(); + } else { + return regionInfo_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> + getRegionInfoFieldBuilder() { + if (regionInfoBuilder_ == null) { + regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( + regionInfo_, + getParentForChildren(), + isClean()); + regionInfo_ = null; + } + return regionInfoBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetRegionInfoResponse) + } + + static { + defaultInstance = new GetRegionInfoResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetRegionInfoResponse) + } + + public interface GetStoreFileListRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated bytes columnFamily = 2; + java.util.List getColumnFamilyList(); + int getColumnFamilyCount(); + com.google.protobuf.ByteString getColumnFamily(int index); + } + public static final class GetStoreFileListRequest extends + com.google.protobuf.GeneratedMessage + implements GetStoreFileListRequestOrBuilder { + // Use GetStoreFileListRequest.newBuilder() to construct. 
+ private GetStoreFileListRequest(Builder builder) { + super(builder); + } + private GetStoreFileListRequest(boolean noInit) {} + + private static final GetStoreFileListRequest defaultInstance; + public static GetStoreFileListRequest getDefaultInstance() { + return defaultInstance; + } + + public GetStoreFileListRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // repeated bytes columnFamily = 2; + public static final int COLUMNFAMILY_FIELD_NUMBER = 2; + private java.util.List columnFamily_; + public java.util.List + getColumnFamilyList() { + return columnFamily_; + } + public int getColumnFamilyCount() { + return columnFamily_.size(); + } + public com.google.protobuf.ByteString getColumnFamily(int index) { + return columnFamily_.get(index); + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + columnFamily_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < columnFamily_.size(); i++) { + output.writeBytes(2, columnFamily_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + { + int dataSize = 0; + for (int i = 0; i < columnFamily_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(columnFamily_.get(i)); + } + size += dataSize; + size += 1 * getColumnFamilyList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return 
super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getColumnFamilyList() + .equals(other.getColumnFamilyList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getColumnFamilyCount() > 0) { + hash = (37 * hash) + COLUMNFAMILY_FIELD_NUMBER; + hash = (53 * hash) + getColumnFamilyList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + columnFamily_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest build() { + 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + columnFamily_ = java.util.Collections.unmodifiableList(columnFamily_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.columnFamily_ = columnFamily_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (!other.columnFamily_.isEmpty()) { + if (columnFamily_.isEmpty()) { + columnFamily_ = other.columnFamily_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureColumnFamilyIsMutable(); + columnFamily_.addAll(other.columnFamily_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + 
subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + ensureColumnFamilyIsMutable(); + columnFamily_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
+ region_,
+ getParentForChildren(),
+ isClean());
+ region_ = null;
+ }
+ return regionBuilder_;
+ }
+
+ // repeated bytes columnFamily = 2;
+ private java.util.List<com.google.protobuf.ByteString> columnFamily_ = java.util.Collections.emptyList();;
+ private void ensureColumnFamilyIsMutable() {
+ if (!((bitField0_ & 0x00000002) == 0x00000002)) {
+ columnFamily_ = new java.util.ArrayList<com.google.protobuf.ByteString>(columnFamily_);
+ bitField0_ |= 0x00000002;
+ }
+ }
+ public java.util.List<com.google.protobuf.ByteString>
+ getColumnFamilyList() {
+ return java.util.Collections.unmodifiableList(columnFamily_);
+ }
+ public int getColumnFamilyCount() {
+ return columnFamily_.size();
+ }
+ public com.google.protobuf.ByteString getColumnFamily(int index) {
+ return columnFamily_.get(index);
+ }
+ public Builder setColumnFamily(
+ int index, com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureColumnFamilyIsMutable();
+ columnFamily_.set(index, value);
+ onChanged();
+ return this;
+ }
+ public Builder addColumnFamily(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureColumnFamilyIsMutable();
+ columnFamily_.add(value);
+ onChanged();
+ return this;
+ }
+ public Builder addAllColumnFamily(
+ java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+ ensureColumnFamilyIsMutable();
+ super.addAll(values, columnFamily_);
+ onChanged();
+ return this;
+ }
+ public Builder clearColumnFamily() {
+ columnFamily_ = java.util.Collections.emptyList();;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:GetStoreFileListRequest)
+ }
+
+ static {
+ defaultInstance = new GetStoreFileListRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GetStoreFileListRequest)
+ }
+
+ public interface GetStoreFileListResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated string storeFile = 1;
+ java.util.List<String> getStoreFileList();
+ int getStoreFileCount();
+ String getStoreFile(int index);
+ }
+ public static final class GetStoreFileListResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements GetStoreFileListResponseOrBuilder {
+ // Use GetStoreFileListResponse.newBuilder() to construct.
+ private GetStoreFileListResponse(Builder builder) { + super(builder); + } + private GetStoreFileListResponse(boolean noInit) {} + + private static final GetStoreFileListResponse defaultInstance; + public static GetStoreFileListResponse getDefaultInstance() { + return defaultInstance; + } + + public GetStoreFileListResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_fieldAccessorTable; + } + + // repeated string storeFile = 1; + public static final int STOREFILE_FIELD_NUMBER = 1; + private com.google.protobuf.LazyStringList storeFile_; + public java.util.List + getStoreFileList() { + return storeFile_; + } + public int getStoreFileCount() { + return storeFile_.size(); + } + public String getStoreFile(int index) { + return storeFile_.get(index); + } + + private void initFields() { + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < storeFile_.size(); i++) { + output.writeBytes(1, storeFile_.getByteString(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < storeFile_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(storeFile_.getByteString(i)); + } + size += dataSize; + size += 1 * getStoreFileList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse) obj; + + boolean result = true; + result = result && getStoreFileList() + .equals(other.getStoreFileList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getStoreFileCount() > 0) { + hash = (37 * hash) + STOREFILE_FIELD_NUMBER; + hash = (53 * hash) + getStoreFileList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder 
= new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetStoreFileListResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList( + storeFile_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.storeFile_ = storeFile_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse)other); + } else { + super.mergeFrom(other); + 
return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance()) return this;
+ if (!other.storeFile_.isEmpty()) {
+ if (storeFile_.isEmpty()) {
+ storeFile_ = other.storeFile_;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ } else {
+ ensureStoreFileIsMutable();
+ storeFile_.addAll(other.storeFile_);
+ }
+ onChanged();
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ ensureStoreFileIsMutable();
+ storeFile_.add(input.readBytes());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // repeated string storeFile = 1;
+ private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ private void ensureStoreFileIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ public java.util.List<String>
+ getStoreFileList() {
+ return java.util.Collections.unmodifiableList(storeFile_);
+ }
+ public int getStoreFileCount() {
+ return storeFile_.size();
+ }
+ public String getStoreFile(int index) {
+ return storeFile_.get(index);
+ }
+ public Builder setStoreFile(
+ int index, String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureStoreFileIsMutable();
+ storeFile_.set(index, value);
+ onChanged();
+ return this;
+ }
+ public Builder addStoreFile(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureStoreFileIsMutable();
+ storeFile_.add(value);
+ onChanged();
+ return this;
+ }
+ public Builder addAllStoreFile(
+ java.lang.Iterable<String> values) {
+ ensureStoreFileIsMutable();
+ super.addAll(values, storeFile_);
+ onChanged();
+ return this;
+ }
+ public Builder clearStoreFile() {
+ storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+ void addStoreFile(com.google.protobuf.ByteString value) {
+ ensureStoreFileIsMutable();
+ storeFile_.add(value);
+ onChanged();
+ }
+
+ // @@protoc_insertion_point(builder_scope:GetStoreFileListResponse)
+ }
+
+ static {
+ defaultInstance = new GetStoreFileListResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:GetStoreFileListResponse)
+ }
+
+ public interface GetOnlineRegionRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class GetOnlineRegionRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements GetOnlineRegionRequestOrBuilder {
+ // Use GetOnlineRegionRequest.newBuilder() to construct.
+ private GetOnlineRegionRequest(Builder builder) { + super(builder); + } + private GetOnlineRegionRequest(boolean noInit) {} + + private static final GetOnlineRegionRequest defaultInstance; + public static GetOnlineRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public GetOnlineRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:GetOnlineRegionRequest) + } + + static { + defaultInstance = new GetOnlineRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetOnlineRegionRequest) + } + + public interface GetOnlineRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder 
{
+
+ // repeated .RegionInfo regionInfo = 1;
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>
+ getRegionInfoList();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
+ int getRegionInfoCount();
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoOrBuilderList();
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
+ int index);
+ }
+ public static final class GetOnlineRegionResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements GetOnlineRegionResponseOrBuilder {
+ // Use GetOnlineRegionResponse.newBuilder() to construct.
+ private GetOnlineRegionResponse(Builder builder) {
+ super(builder);
+ }
+ private GetOnlineRegionResponse(boolean noInit) {}
+
+ private static final GetOnlineRegionResponse defaultInstance;
+ public static GetOnlineRegionResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public GetOnlineRegionResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable;
+ }
+
+ // repeated .RegionInfo regionInfo = 1;
+ public static final int REGIONINFO_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_;
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() {
+ return regionInfo_;
+ }
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
+ getRegionInfoOrBuilderList() {
+ return regionInfo_;
+ }
+ public int getRegionInfoCount() {
+ return regionInfo_.size();
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) {
+ return regionInfo_.get(index);
+ }
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
+ int index) {
+ return regionInfo_.get(index);
+ }
+
+ private void initFields() {
+ regionInfo_ = java.util.Collections.emptyList();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ for (int i = 0; i < getRegionInfoCount(); i++) {
+ if (!getRegionInfo(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < regionInfo_.size(); i++) {
+ output.writeMessage(1, regionInfo_.get(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < regionInfo_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, regionInfo_.get(i));
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj
instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse) obj; + + boolean result = true; + result = result && getRegionInfoList() + .equals(other.getRegionInfoList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRegionInfoCount() > 0) { + hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; + hash = (53 * hash) + getRegionInfoList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return 
newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionInfoFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionInfoBuilder_ == null) { + regionInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + regionInfoBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + 
result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse(this); + int from_bitField0_ = bitField0_; + if (regionInfoBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.regionInfo_ = regionInfo_; + } else { + result.regionInfo_ = regionInfoBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this; + if (regionInfoBuilder_ == null) { + if (!other.regionInfo_.isEmpty()) { + if (regionInfo_.isEmpty()) { + regionInfo_ = other.regionInfo_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegionInfoIsMutable(); + regionInfo_.addAll(other.regionInfo_); + } + onChanged(); + } + } else { + if (!other.regionInfo_.isEmpty()) { + if (regionInfoBuilder_.isEmpty()) { + regionInfoBuilder_.dispose(); + regionInfoBuilder_ = null; + regionInfo_ = other.regionInfo_; + bitField0_ = (bitField0_ & ~0x00000001); + regionInfoBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getRegionInfoFieldBuilder() : null; + } else { + regionInfoBuilder_.addAllMessages(other.regionInfo_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getRegionInfoCount(); i++) { + if (!getRegionInfo(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addRegionInfo(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .RegionInfo regionInfo = 1; + private java.util.List regionInfo_ = + java.util.Collections.emptyList(); + private void ensureRegionInfoIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + regionInfo_ = new java.util.ArrayList(regionInfo_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; + + public java.util.List getRegionInfoList() { + if (regionInfoBuilder_ == null) { + return java.util.Collections.unmodifiableList(regionInfo_); + } else { + return regionInfoBuilder_.getMessageList(); + } + } + public int getRegionInfoCount() { + if (regionInfoBuilder_ == null) { + return regionInfo_.size(); + } else { + return regionInfoBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { + if (regionInfoBuilder_ == null) { + return regionInfo_.get(index); + } else { + return regionInfoBuilder_.getMessage(index); + } + } + public Builder setRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionInfoIsMutable(); + regionInfo_.set(index, value); + onChanged(); + } else { + regionInfoBuilder_.setMessage(index, value); + } + return this; + } + public Builder setRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.set(index, builderForValue.build()); + onChanged(); + } else { + regionInfoBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionInfoIsMutable(); + 
regionInfo_.add(value); + onChanged(); + } else { + regionInfoBuilder_.addMessage(value); + } + return this; + } + public Builder addRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { + if (regionInfoBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionInfoIsMutable(); + regionInfo_.add(index, value); + onChanged(); + } else { + regionInfoBuilder_.addMessage(index, value); + } + return this; + } + public Builder addRegionInfo( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.add(builderForValue.build()); + onChanged(); + } else { + regionInfoBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addRegionInfo( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.add(index, builderForValue.build()); + onChanged(); + } else { + regionInfoBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllRegionInfo( + java.lang.Iterable values) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + super.addAll(values, regionInfo_); + onChanged(); + } else { + regionInfoBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearRegionInfo() { + if (regionInfoBuilder_ == null) { + regionInfo_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + regionInfoBuilder_.clear(); + } + return this; + } + public Builder removeRegionInfo(int index) { + if (regionInfoBuilder_ == null) { + ensureRegionInfoIsMutable(); + regionInfo_.remove(index); + onChanged(); + } else { + regionInfoBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder( + int index) { + return getRegionInfoFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( + int index) { + if (regionInfoBuilder_ == null) { + return regionInfo_.get(index); } else { + return regionInfoBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getRegionInfoOrBuilderList() { + if (regionInfoBuilder_ != null) { + return regionInfoBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(regionInfo_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() { + return getRegionInfoFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder( + int index) { + return getRegionInfoFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); + } + public java.util.List + getRegionInfoBuilderList() { + return getRegionInfoFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> + 
getRegionInfoFieldBuilder() { + if (regionInfoBuilder_ == null) { + regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( + regionInfo_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + regionInfo_ = null; + } + return regionInfoBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetOnlineRegionResponse) + } + + static { + defaultInstance = new GetOnlineRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetOnlineRegionResponse) + } + + public interface OpenRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .RegionSpecifier region = 1; + java.util.List + getRegionList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index); + int getRegionCount(); + java.util.List + getRegionOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( + int index); + + // optional uint32 versionOfOfflineNode = 2; + boolean hasVersionOfOfflineNode(); + int getVersionOfOfflineNode(); + } + public static final class OpenRegionRequest extends + com.google.protobuf.GeneratedMessage + implements OpenRegionRequestOrBuilder { + // Use OpenRegionRequest.newBuilder() to construct. + private OpenRegionRequest(Builder builder) { + super(builder); + } + private OpenRegionRequest(boolean noInit) {} + + private static final OpenRegionRequest defaultInstance; + public static OpenRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public OpenRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // repeated .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private java.util.List region_; + public java.util.List getRegionList() { + return region_; + } + public java.util.List + getRegionOrBuilderList() { + return region_; + } + public int getRegionCount() { + return region_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index) { + return region_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( + int index) { + return region_.get(index); + } + + // optional uint32 versionOfOfflineNode = 2; + public static final int VERSIONOFOFFLINENODE_FIELD_NUMBER = 2; + private int versionOfOfflineNode_; + public boolean hasVersionOfOfflineNode() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public int getVersionOfOfflineNode() { + return versionOfOfflineNode_; + } + + private void initFields() { + region_ = java.util.Collections.emptyList(); + versionOfOfflineNode_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if 
(isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getRegionCount(); i++) { + if (!getRegion(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < region_.size(); i++) { + output.writeMessage(1, region_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt32(2, versionOfOfflineNode_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < region_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, versionOfOfflineNode_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest) obj; + + boolean result = true; + result = result && getRegionList() + .equals(other.getRegionList()); + result = result && (hasVersionOfOfflineNode() == other.hasVersionOfOfflineNode()); + if (hasVersionOfOfflineNode()) { + result = result && (getVersionOfOfflineNode() + == other.getVersionOfOfflineNode()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRegionCount() > 0) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegionList().hashCode(); + } + if (hasVersionOfOfflineNode()) { + hash = (37 * hash) + VERSIONOFOFFLINENODE_FIELD_NUMBER; + hash = (53 * hash) + getVersionOfOfflineNode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + 
maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + regionBuilder_.clear(); + } + versionOfOfflineNode_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + region_ = java.util.Collections.unmodifiableList(region_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.versionOfOfflineNode_ = versionOfOfflineNode_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance()) return this; + if (regionBuilder_ == null) { + if (!other.region_.isEmpty()) { + if (region_.isEmpty()) { + region_ = other.region_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegionIsMutable(); + region_.addAll(other.region_); + } + onChanged(); + } + } else { + if (!other.region_.isEmpty()) { + if (regionBuilder_.isEmpty()) 
{ + regionBuilder_.dispose(); + regionBuilder_ = null; + region_ = other.region_; + bitField0_ = (bitField0_ & ~0x00000001); + regionBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getRegionFieldBuilder() : null; + } else { + regionBuilder_.addAllMessages(other.region_); + } + } + } + if (other.hasVersionOfOfflineNode()) { + setVersionOfOfflineNode(other.getVersionOfOfflineNode()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getRegionCount(); i++) { + if (!getRegion(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + versionOfOfflineNode_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .RegionSpecifier region = 1; + private java.util.List region_ = + java.util.Collections.emptyList(); + private void ensureRegionIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + region_ = new java.util.ArrayList(region_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + + public java.util.List getRegionList() { + if (regionBuilder_ == null) { + return java.util.Collections.unmodifiableList(region_); + } else { + return regionBuilder_.getMessageList(); + } + } + public int getRegionCount() { + if (regionBuilder_ == null) { + return region_.size(); + } else { + return regionBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(int index) { + if (regionBuilder_ == null) { + return region_.get(index); + } else { + return regionBuilder_.getMessage(index); + } + } + public Builder setRegion( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionIsMutable(); + region_.set(index, value); + onChanged(); + } else { + regionBuilder_.setMessage(index, value); + } + return this; + } + public Builder setRegion( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + ensureRegionIsMutable(); + region_.set(index, builderForValue.build()); + onChanged(); + } else { + 
regionBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionIsMutable(); + region_.add(value); + onChanged(); + } else { + regionBuilder_.addMessage(value); + } + return this; + } + public Builder addRegion( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionIsMutable(); + region_.add(index, value); + onChanged(); + } else { + regionBuilder_.addMessage(index, value); + } + return this; + } + public Builder addRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + ensureRegionIsMutable(); + region_.add(builderForValue.build()); + onChanged(); + } else { + regionBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addRegion( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + ensureRegionIsMutable(); + region_.add(index, builderForValue.build()); + onChanged(); + } else { + regionBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllRegion( + java.lang.Iterable values) { + if (regionBuilder_ == null) { + ensureRegionIsMutable(); + super.addAll(values, region_); + onChanged(); + } else { + regionBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + regionBuilder_.clear(); + } + return this; + } + public Builder removeRegion(int index) { + if (regionBuilder_ == null) { + ensureRegionIsMutable(); + region_.remove(index); + onChanged(); + } else { + regionBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder( + int index) { + return getRegionFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder( + int index) { + if (regionBuilder_ == null) { + return region_.get(index); } else { + return regionBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getRegionOrBuilderList() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(region_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder addRegionBuilder() { + return getRegionFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder addRegionBuilder( + int index) { + return getRegionFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()); + } + public java.util.List + getRegionBuilderList() { + return getRegionFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional uint32 versionOfOfflineNode = 2; + private int versionOfOfflineNode_ ; + public boolean hasVersionOfOfflineNode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getVersionOfOfflineNode() { + return versionOfOfflineNode_; + } + public Builder setVersionOfOfflineNode(int value) { + bitField0_ |= 0x00000002; + versionOfOfflineNode_ = value; + onChanged(); + return this; + } + public Builder clearVersionOfOfflineNode() { + bitField0_ = (bitField0_ & ~0x00000002); + versionOfOfflineNode_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:OpenRegionRequest) + } + + static { + defaultInstance = new OpenRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OpenRegionRequest) + } + + public interface OpenRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + java.util.List getOpeningStateList(); + int getOpeningStateCount(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index); + } + public static final class OpenRegionResponse extends + com.google.protobuf.GeneratedMessage + implements OpenRegionResponseOrBuilder { + // Use OpenRegionResponse.newBuilder() to construct. 
+ private OpenRegionResponse(Builder builder) { + super(builder); + } + private OpenRegionResponse(boolean noInit) {} + + private static final OpenRegionResponse defaultInstance; + public static OpenRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public OpenRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; + } + + public enum RegionOpeningState + implements com.google.protobuf.ProtocolMessageEnum { + OPENED(0, 0), + ALREADY_OPENED(1, 1), + FAILED_OPENING(2, 2), + ; + + public static final int OPENED_VALUE = 0; + public static final int ALREADY_OPENED_VALUE = 1; + public static final int FAILED_OPENING_VALUE = 2; + + + public final int getNumber() { return value; } + + public static RegionOpeningState valueOf(int value) { + switch (value) { + case 0: return OPENED; + case 1: return ALREADY_OPENED; + case 2: return FAILED_OPENING; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public RegionOpeningState findValueByNumber(int number) { + return RegionOpeningState.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0); + } + + private static final RegionOpeningState[] VALUES = { + OPENED, ALREADY_OPENED, FAILED_OPENING, + }; + + public static RegionOpeningState valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private RegionOpeningState(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:OpenRegionResponse.RegionOpeningState) + } + + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + public static final int OPENINGSTATE_FIELD_NUMBER = 1; + private java.util.List openingState_; + public java.util.List getOpeningStateList() { + return openingState_; + } + public int getOpeningStateCount() { + return openingState_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { + return openingState_.get(index); + } + + private void initFields() { + openingState_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = 
memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < openingState_.size(); i++) { + output.writeEnum(1, openingState_.get(i).getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < openingState_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeEnumSizeNoTag(openingState_.get(i).getNumber()); + } + size += dataSize; + size += 1 * openingState_.size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse) obj; + + boolean result = true; + result = result && getOpeningStateList() + .equals(other.getOpeningStateList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getOpeningStateCount() > 0) { + hash = (37 * hash) + OPENINGSTATE_FIELD_NUMBER; + hash = (53 * hash) + hashEnumList(getOpeningStateList()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + openingState_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + 
getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + openingState_ = java.util.Collections.unmodifiableList(openingState_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.openingState_ = openingState_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance()) return this; + if (!other.openingState_.isEmpty()) { + if (openingState_.isEmpty()) { + openingState_ = other.openingState_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureOpeningStateIsMutable(); + openingState_.addAll(other.openingState_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value = 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + addOpeningState(value); + } + break; + } + case 10: { + int length = input.readRawVarint32(); + int oldLimit = input.pushLimit(length); + while(input.getBytesUntilLimit() > 0) { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(1, rawValue); + } else { + addOpeningState(value); + } + } + input.popLimit(oldLimit); + break; + } + } + } + } + + private int bitField0_; + + // repeated .OpenRegionResponse.RegionOpeningState openingState = 1; + private java.util.List openingState_ = + java.util.Collections.emptyList(); + private void ensureOpeningStateIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + openingState_ = new java.util.ArrayList(openingState_); + bitField0_ |= 0x00000001; + } + } + public java.util.List getOpeningStateList() { + return java.util.Collections.unmodifiableList(openingState_); + } + public int getOpeningStateCount() { + return openingState_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { + return openingState_.get(index); + } + public Builder setOpeningState( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpeningStateIsMutable(); + openingState_.set(index, value); + onChanged(); + return this; + } + public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState value) { + if (value == null) { + throw new NullPointerException(); + } + ensureOpeningStateIsMutable(); + openingState_.add(value); + onChanged(); + return this; + } + public Builder addAllOpeningState( + java.lang.Iterable values) { + ensureOpeningStateIsMutable(); + super.addAll(values, openingState_); + onChanged(); + return this; + } + public Builder clearOpeningState() { + openingState_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:OpenRegionResponse) + } + + static { + defaultInstance = new OpenRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:OpenRegionResponse) + } + + public interface CloseRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional uint32 versionOfClosingNode = 2; + boolean hasVersionOfClosingNode(); + int getVersionOfClosingNode(); + + // optional bool transitionInZK = 3 [default = true]; + boolean hasTransitionInZK(); + boolean getTransitionInZK(); + } + public static final class CloseRegionRequest extends + com.google.protobuf.GeneratedMessage + implements CloseRegionRequestOrBuilder { + // Use CloseRegionRequest.newBuilder() to construct. 
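For orientation, a minimal sketch of how calling code might drive the OpenRegionResponse and CloseRegionRequest types generated above. The RegionAdminProtosSketch wrapper class, its method names, and the pre-built RegionSpecifier argument are illustrative assumptions, not part of the generated file; only the builder/accessor calls shown in the patch are used.

    import java.util.List;

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
    import org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest;
    import org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse;
    import org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.RegionOpeningState;

    // Illustrative wrapper; class and method names are hypothetical.
    public class RegionAdminProtosSketch {

      // Counts how many entries in the repeated openingState field report OPENED.
      static int countOpened(OpenRegionResponse response) {
        int opened = 0;
        for (RegionOpeningState state : response.getOpeningStateList()) {
          if (state == RegionOpeningState.OPENED) {
            opened++;
          }
        }
        return opened;
      }

      // Builds a CloseRegionRequest; only the region field is required, the other
      // two fields are optional (transitionInZK already defaults to true).
      static byte[] buildCloseRequest(RegionSpecifier region, int znodeVersion) {
        CloseRegionRequest request = CloseRegionRequest.newBuilder()
            .setRegion(region)                      // required .RegionSpecifier
            .setVersionOfClosingNode(znodeVersion)  // optional uint32
            .setTransitionInZK(true)                // optional bool, default true
            .build();                               // throws if the required region were unset
        return request.toByteArray();               // wire-format bytes for the RPC payload
      }
    }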
+ private CloseRegionRequest(Builder builder) { + super(builder); + } + private CloseRegionRequest(boolean noInit) {} + + private static final CloseRegionRequest defaultInstance; + public static CloseRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public CloseRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional uint32 versionOfClosingNode = 2; + public static final int VERSIONOFCLOSINGNODE_FIELD_NUMBER = 2; + private int versionOfClosingNode_; + public boolean hasVersionOfClosingNode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getVersionOfClosingNode() { + return versionOfClosingNode_; + } + + // optional bool transitionInZK = 3 [default = true]; + public static final int TRANSITIONINZK_FIELD_NUMBER = 3; + private boolean transitionInZK_; + public boolean hasTransitionInZK() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getTransitionInZK() { + return transitionInZK_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + versionOfClosingNode_ = 0; + transitionInZK_ = true; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, versionOfClosingNode_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBool(3, transitionInZK_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, versionOfClosingNode_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += 
com.google.protobuf.CodedOutputStream + .computeBoolSize(3, transitionInZK_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasVersionOfClosingNode() == other.hasVersionOfClosingNode()); + if (hasVersionOfClosingNode()) { + result = result && (getVersionOfClosingNode() + == other.getVersionOfClosingNode()); + } + result = result && (hasTransitionInZK() == other.hasTransitionInZK()); + if (hasTransitionInZK()) { + result = result && (getTransitionInZK() + == other.getTransitionInZK()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasVersionOfClosingNode()) { + hash = (37 * hash) + VERSIONOFCLOSINGNODE_FIELD_NUMBER; + hash = (53 * hash) + getVersionOfClosingNode(); + } + if (hasTransitionInZK()) { + hash = (37 * hash) + TRANSITIONINZK_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getTransitionInZK()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + 
versionOfClosingNode_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + transitionInZK_ = true; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.versionOfClosingNode_ = versionOfClosingNode_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.transitionInZK_ = transitionInZK_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasVersionOfClosingNode()) { + setVersionOfClosingNode(other.getVersionOfClosingNode()); + } + if (other.hasTransitionInZK()) { + setTransitionInZK(other.getTransitionInZK()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + versionOfClosingNode_ = input.readUInt32(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + transitionInZK_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + 
bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional uint32 versionOfClosingNode = 2; + private int versionOfClosingNode_ ; + public boolean hasVersionOfClosingNode() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getVersionOfClosingNode() { + return versionOfClosingNode_; + } + public Builder setVersionOfClosingNode(int value) { + bitField0_ |= 0x00000002; + versionOfClosingNode_ = value; + onChanged(); + return this; + } + public Builder clearVersionOfClosingNode() { + bitField0_ = (bitField0_ & ~0x00000002); + versionOfClosingNode_ = 0; + onChanged(); + return this; + } + + // optional bool transitionInZK = 3 [default = true]; + private boolean transitionInZK_ = true; + public boolean hasTransitionInZK() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getTransitionInZK() { + return transitionInZK_; + } + public Builder setTransitionInZK(boolean value) { + bitField0_ |= 0x00000004; + transitionInZK_ = value; + onChanged(); + return this; + } + public Builder clearTransitionInZK() { + bitField0_ = (bitField0_ & ~0x00000004); + transitionInZK_ = true; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CloseRegionRequest) + } + + static { + defaultInstance = new CloseRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CloseRegionRequest) + } + + public interface CloseRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool closed = 1; + boolean hasClosed(); + boolean getClosed(); + } + public static final class CloseRegionResponse extends + com.google.protobuf.GeneratedMessage + implements CloseRegionResponseOrBuilder { + // Use CloseRegionResponse.newBuilder() to construct. 
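Similarly, a small sketch of the decode path for CloseRegionResponse, using only the generated parseFrom and accessor shown here; the wrapper class and method name are invented for illustration.

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse;

    // Illustrative wrapper; class and method names are hypothetical.
    public class CloseRegionResponseSketch {

      // Decodes a serialized CloseRegionResponse and reports its closed flag.
      // 'closed' is a required field, so parseFrom() rejects bytes that omit it
      // with an InvalidProtocolBufferException.
      static boolean regionWasClosed(byte[] responseBytes)
          throws InvalidProtocolBufferException {
        CloseRegionResponse response = CloseRegionResponse.parseFrom(responseBytes);
        return response.getClosed();
      }
    }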
+ private CloseRegionResponse(Builder builder) { + super(builder); + } + private CloseRegionResponse(boolean noInit) {} + + private static final CloseRegionResponse defaultInstance; + public static CloseRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public CloseRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bool closed = 1; + public static final int CLOSED_FIELD_NUMBER = 1; + private boolean closed_; + public boolean hasClosed() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getClosed() { + return closed_; + } + + private void initFields() { + closed_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasClosed()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, closed_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, closed_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse) obj; + + boolean result = true; + result = result && (hasClosed() == other.hasClosed()); + if (hasClosed()) { + result = result && (getClosed() + == other.getClosed()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasClosed()) { + hash = (37 * hash) + CLOSED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getClosed()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponseOrBuilder { + public static 
final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + closed_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.closed_ = closed_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance()) return this; + if (other.hasClosed()) { + setClosed(other.getClosed()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + 
public final boolean isInitialized() { + if (!hasClosed()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + closed_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool closed = 1; + private boolean closed_ ; + public boolean hasClosed() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getClosed() { + return closed_; + } + public Builder setClosed(boolean value) { + bitField0_ |= 0x00000001; + closed_ = value; + onChanged(); + return this; + } + public Builder clearClosed() { + bitField0_ = (bitField0_ & ~0x00000001); + closed_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CloseRegionResponse) + } + + static { + defaultInstance = new CloseRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CloseRegionResponse) + } + + public interface FlushRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional uint64 ifOlderThanTs = 2; + boolean hasIfOlderThanTs(); + long getIfOlderThanTs(); + } + public static final class FlushRegionRequest extends + com.google.protobuf.GeneratedMessage + implements FlushRegionRequestOrBuilder { + // Use FlushRegionRequest.newBuilder() to construct. 
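And a short sketch of building a FlushRegionRequest with its optional ifOlderThanTs field; again the wrapper class, the method name, and the caller-supplied RegionSpecifier are assumptions made for illustration only.

    import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
    import org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest;

    // Illustrative wrapper; class and method names are hypothetical.
    public class FlushRegionRequestSketch {

      // Builds a FlushRegionRequest; region is required, while ifOlderThanTs is an
      // optional uint64 and is only set when the caller supplies a non-negative value.
      static FlushRegionRequest buildFlushRequest(RegionSpecifier region, long olderThanTs) {
        FlushRegionRequest.Builder builder = FlushRegionRequest.newBuilder()
            .setRegion(region);            // required .RegionSpecifier
        if (olderThanTs >= 0) {
          builder.setIfOlderThanTs(olderThanTs); // optional uint64, left unset otherwise
        }
        return builder.build();
      }
    }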
+ private FlushRegionRequest(Builder builder) { + super(builder); + } + private FlushRegionRequest(boolean noInit) {} + + private static final FlushRegionRequest defaultInstance; + public static FlushRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public FlushRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional uint64 ifOlderThanTs = 2; + public static final int IFOLDERTHANTS_FIELD_NUMBER = 2; + private long ifOlderThanTs_; + public boolean hasIfOlderThanTs() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getIfOlderThanTs() { + return ifOlderThanTs_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + ifOlderThanTs_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, ifOlderThanTs_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, ifOlderThanTs_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasIfOlderThanTs() == other.hasIfOlderThanTs()); + if (hasIfOlderThanTs()) { + result = result && (getIfOlderThanTs() + == other.getIfOlderThanTs()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasIfOlderThanTs()) { + hash = (37 * hash) + IFOLDERTHANTS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getIfOlderThanTs()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + ifOlderThanTs_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest buildParsed() + throws 
com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.ifOlderThanTs_ = ifOlderThanTs_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasIfOlderThanTs()) { + setIfOlderThanTs(other.getIfOlderThanTs()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + ifOlderThanTs_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional uint64 ifOlderThanTs = 2; + private long ifOlderThanTs_ ; + public boolean hasIfOlderThanTs() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getIfOlderThanTs() { + return ifOlderThanTs_; + } + public Builder setIfOlderThanTs(long value) { + bitField0_ |= 0x00000002; + ifOlderThanTs_ = value; + onChanged(); + return this; + } + public 
Builder clearIfOlderThanTs() { + bitField0_ = (bitField0_ & ~0x00000002); + ifOlderThanTs_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:FlushRegionRequest) + } + + static { + defaultInstance = new FlushRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FlushRegionRequest) + } + + public interface FlushRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 lastFlushTime = 1; + boolean hasLastFlushTime(); + long getLastFlushTime(); + + // optional bool flushed = 2; + boolean hasFlushed(); + boolean getFlushed(); + } + public static final class FlushRegionResponse extends + com.google.protobuf.GeneratedMessage + implements FlushRegionResponseOrBuilder { + // Use FlushRegionResponse.newBuilder() to construct. + private FlushRegionResponse(Builder builder) { + super(builder); + } + private FlushRegionResponse(boolean noInit) {} + + private static final FlushRegionResponse defaultInstance; + public static FlushRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public FlushRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 lastFlushTime = 1; + public static final int LASTFLUSHTIME_FIELD_NUMBER = 1; + private long lastFlushTime_; + public boolean hasLastFlushTime() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLastFlushTime() { + return lastFlushTime_; + } + + // optional bool flushed = 2; + public static final int FLUSHED_FIELD_NUMBER = 2; + private boolean flushed_; + public boolean hasFlushed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getFlushed() { + return flushed_; + } + + private void initFields() { + lastFlushTime_ = 0L; + flushed_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLastFlushTime()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, lastFlushTime_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, flushed_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, lastFlushTime_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, flushed_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long 
serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse) obj; + + boolean result = true; + result = result && (hasLastFlushTime() == other.hasLastFlushTime()); + if (hasLastFlushTime()) { + result = result && (getLastFlushTime() + == other.getLastFlushTime()); + } + result = result && (hasFlushed() == other.hasFlushed()); + if (hasFlushed()) { + result = result && (getFlushed() + == other.getFlushed()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLastFlushTime()) { + hash = (37 * hash) + LASTFLUSHTIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLastFlushTime()); + } + if (hasFlushed()) { + hash = (37 * hash) + FLUSHED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getFlushed()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + lastFlushTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + flushed_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse result = buildPartial(); + if 
(!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.lastFlushTime_ = lastFlushTime_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.flushed_ = flushed_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance()) return this; + if (other.hasLastFlushTime()) { + setLastFlushTime(other.getLastFlushTime()); + } + if (other.hasFlushed()) { + setFlushed(other.getFlushed()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLastFlushTime()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lastFlushTime_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + flushed_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 lastFlushTime = 1; + private long lastFlushTime_ ; + public boolean hasLastFlushTime() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLastFlushTime() { + return lastFlushTime_; + } + public Builder setLastFlushTime(long value) { + bitField0_ |= 0x00000001; + lastFlushTime_ = value; + onChanged(); + return this; + } + public Builder clearLastFlushTime() { + bitField0_ = (bitField0_ & ~0x00000001); + lastFlushTime_ = 0L; + onChanged(); + return this; + } + + // optional bool flushed = 2; + private boolean 
flushed_ ; + public boolean hasFlushed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getFlushed() { + return flushed_; + } + public Builder setFlushed(boolean value) { + bitField0_ |= 0x00000002; + flushed_ = value; + onChanged(); + return this; + } + public Builder clearFlushed() { + bitField0_ = (bitField0_ & ~0x00000002); + flushed_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:FlushRegionResponse) + } + + static { + defaultInstance = new FlushRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FlushRegionResponse) + } + + public interface SplitRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional bytes splitPoint = 2; + boolean hasSplitPoint(); + com.google.protobuf.ByteString getSplitPoint(); + } + public static final class SplitRegionRequest extends + com.google.protobuf.GeneratedMessage + implements SplitRegionRequestOrBuilder { + // Use SplitRegionRequest.newBuilder() to construct. + private SplitRegionRequest(Builder builder) { + super(builder); + } + private SplitRegionRequest(boolean noInit) {} + + private static final SplitRegionRequest defaultInstance; + public static SplitRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public SplitRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional bytes splitPoint = 2; + public static final int SPLITPOINT_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString splitPoint_; + public boolean hasSplitPoint() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSplitPoint() { + return splitPoint_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + splitPoint_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 
1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, splitPoint_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, splitPoint_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasSplitPoint() == other.hasSplitPoint()); + if (hasSplitPoint()) { + result = result && getSplitPoint() + .equals(other.getSplitPoint()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasSplitPoint()) { + hash = (37 * hash) + SPLITPOINT_FIELD_NUMBER; + hash = (53 * hash) + getSplitPoint().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + 
return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + splitPoint_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.splitPoint_ = splitPoint_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasSplitPoint()) { + setSplitPoint(other.getSplitPoint()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + bitField0_ |= 0x00000002; + splitPoint_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // optional bytes splitPoint = 2; + private com.google.protobuf.ByteString splitPoint_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasSplitPoint() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getSplitPoint() { + return splitPoint_; + } + public Builder setSplitPoint(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + splitPoint_ = value; + onChanged(); + return this; + } + public Builder clearSplitPoint() { + bitField0_ = (bitField0_ & ~0x00000002); + splitPoint_ = getDefaultInstance().getSplitPoint(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:SplitRegionRequest) + } + + static { + defaultInstance = new SplitRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SplitRegionRequest) + } + + public interface SplitRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class SplitRegionResponse extends + com.google.protobuf.GeneratedMessage + implements SplitRegionResponseOrBuilder { + // Use SplitRegionResponse.newBuilder() to construct. 
+ private SplitRegionResponse(Builder builder) { + super(builder); + } + private SplitRegionResponse(boolean noInit) {} + + private static final SplitRegionResponse defaultInstance; + public static SplitRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public SplitRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_SplitRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } 
+ } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:SplitRegionResponse) + } + + static { + defaultInstance = new SplitRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:SplitRegionResponse) + } + + public interface CompactRegionRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // optional bool major = 2; + boolean hasMajor(); + boolean getMajor(); + } + public static final class CompactRegionRequest extends + com.google.protobuf.GeneratedMessage + implements CompactRegionRequestOrBuilder { + // Use CompactRegionRequest.newBuilder() to construct. + private CompactRegionRequest(Builder builder) { + super(builder); + } + private CompactRegionRequest(boolean noInit) {} + + private static final CompactRegionRequest defaultInstance; + public static CompactRegionRequest getDefaultInstance() { + return defaultInstance; + } + + public CompactRegionRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // optional bool major = 2; + public static final int MAJOR_FIELD_NUMBER = 2; + private boolean major_; + public boolean hasMajor() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMajor() { + return major_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + major_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, major_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, major_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 
0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasMajor() == other.hasMajor()); + if (hasMajor()) { + result = result && (getMajor() + == other.getMajor()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasMajor()) { + hash = (37 * hash) + MAJOR_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getMajor()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + major_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.major_ = major_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasMajor()) { + setMajor(other.getMajor()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + major_ = input.readBool(); + break; + } + } + } + } + + 
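As a quick illustration of the builder lifecycle generated above (newBuilder, set the fields, then build, with build() rejecting uninitialized messages), the sketch below shows how a caller might assemble a CompactRegionRequest. It is not part of the generated file or of this patch; the class and parameter names are invented for the example, and it assumes the caller already has a populated HBaseProtos.RegionSpecifier (whose own fields are defined in hbase.proto and are not shown in this hunk).

import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest;

// Illustration only; not generated code and not part of the patch.
public final class CompactRegionRequestExample {

  // Builds a major-compaction request from an already populated region specifier.
  static CompactRegionRequest buildRequest(HBaseProtos.RegionSpecifier region) {
    return CompactRegionRequest.newBuilder()
        .setRegion(region)  // required .RegionSpecifier region = 1
        .setMajor(true)     // optional bool major = 2; defaults to false when unset
        .build();           // throws UninitializedMessageException if region is missing
  }

  private CompactRegionRequestExample() {
    // static helper holder, never instantiated
  }
}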
private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + 
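The region accessors above distinguish setRegion from mergeRegion: setRegion always replaces the current value, while mergeRegion folds the supplied message into a region that is already present on the builder (and otherwise simply adopts it). The small sketch below illustrates that difference; again it is purely illustrative, with hypothetical class and parameter names, and is not part of the patch.

import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest;

// Illustration only; not generated code and not part of the patch.
public final class RegionFieldMergeExample {

  // Demonstrates replace-then-merge semantics on the nested region field.
  static CompactRegionRequest.Builder replaceThenMerge(
      HBaseProtos.RegionSpecifier first, HBaseProtos.RegionSpecifier second) {
    CompactRegionRequest.Builder builder = CompactRegionRequest.newBuilder();
    builder.setRegion(first);    // overwrites whatever region was set before
    builder.mergeRegion(second); // region already set, so 'second' is merged into 'first'
                                 // field by field, per protobuf merge rules
    return builder;
  }

  private RegionFieldMergeExample() {
    // static helper holder, never instantiated
  }
}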
+ // optional bool major = 2; + private boolean major_ ; + public boolean hasMajor() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMajor() { + return major_; + } + public Builder setMajor(boolean value) { + bitField0_ |= 0x00000002; + major_ = value; + onChanged(); + return this; + } + public Builder clearMajor() { + bitField0_ = (bitField0_ & ~0x00000002); + major_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:CompactRegionRequest) + } + + static { + defaultInstance = new CompactRegionRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CompactRegionRequest) + } + + public interface CompactRegionResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class CompactRegionResponse extends + com.google.protobuf.GeneratedMessage + implements CompactRegionResponseOrBuilder { + // Use CompactRegionResponse.newBuilder() to construct. + private CompactRegionResponse(Builder builder) { + super(builder); + } + private CompactRegionResponse(boolean noInit) {} + + private static final CompactRegionResponse defaultInstance; + public static CompactRegionResponse getDefaultInstance() { + return defaultInstance; + } + + public CompactRegionResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return 
builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_CompactRegionResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder 
mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:CompactRegionResponse) + } + + static { + defaultInstance = new CompactRegionResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CompactRegionResponse) + } + + public interface UUIDOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 leastSigBits = 1; + boolean hasLeastSigBits(); + long getLeastSigBits(); + + // required uint64 mostSigBits = 2; + boolean hasMostSigBits(); + long getMostSigBits(); + } + public static final class UUID extends + com.google.protobuf.GeneratedMessage + implements UUIDOrBuilder { + // Use UUID.newBuilder() to construct. + private UUID(Builder builder) { + super(builder); + } + private UUID(boolean noInit) {} + + private static final UUID defaultInstance; + public static UUID getDefaultInstance() { + return defaultInstance; + } + + public UUID getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 leastSigBits = 1; + public static final int LEASTSIGBITS_FIELD_NUMBER = 1; + private long leastSigBits_; + public boolean hasLeastSigBits() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLeastSigBits() { + return leastSigBits_; + } + + // required uint64 mostSigBits = 2; + public static final int MOSTSIGBITS_FIELD_NUMBER = 2; + private long mostSigBits_; + public boolean hasMostSigBits() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getMostSigBits() { + return mostSigBits_; + } + + private void initFields() { + leastSigBits_ = 0L; + mostSigBits_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLeastSigBits()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMostSigBits()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, leastSigBits_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, mostSigBits_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != 
-1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, leastSigBits_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, mostSigBits_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID) obj; + + boolean result = true; + result = result && (hasLeastSigBits() == other.hasLeastSigBits()); + if (hasLeastSigBits()) { + result = result && (getLeastSigBits() + == other.getLeastSigBits()); + } + result = result && (hasMostSigBits() == other.hasMostSigBits()); + if (hasMostSigBits()) { + result = result && (getMostSigBits() + == other.getMostSigBits()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLeastSigBits()) { + hash = (37 * hash) + LEASTSIGBITS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLeastSigBits()); + } + if (hasMostSigBits()) { + hash = (37 * hash) + MOSTSIGBITS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getMostSigBits()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + 
} + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_UUID_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + leastSigBits_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + mostSigBits_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.leastSigBits_ = leastSigBits_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.mostSigBits_ = mostSigBits_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance()) return this; + if (other.hasLeastSigBits()) { + setLeastSigBits(other.getLeastSigBits()); + } + if (other.hasMostSigBits()) { + setMostSigBits(other.getMostSigBits()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLeastSigBits()) { + + return false; + } + if (!hasMostSigBits()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + leastSigBits_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + mostSigBits_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 leastSigBits = 1; + private long leastSigBits_ ; + public boolean hasLeastSigBits() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLeastSigBits() { + return leastSigBits_; + } + public Builder setLeastSigBits(long value) { + bitField0_ |= 0x00000001; + leastSigBits_ = value; + onChanged(); + return this; + } + public Builder clearLeastSigBits() { + bitField0_ = (bitField0_ & ~0x00000001); + 
leastSigBits_ = 0L; + onChanged(); + return this; + } + + // required uint64 mostSigBits = 2; + private long mostSigBits_ ; + public boolean hasMostSigBits() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getMostSigBits() { + return mostSigBits_; + } + public Builder setMostSigBits(long value) { + bitField0_ |= 0x00000002; + mostSigBits_ = value; + onChanged(); + return this; + } + public Builder clearMostSigBits() { + bitField0_ = (bitField0_ & ~0x00000002); + mostSigBits_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UUID) + } + + static { + defaultInstance = new UUID(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UUID) + } + + public interface WALEntryOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .WALEntry.WALKey walKey = 1; + boolean hasWalKey(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getWalKey(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder(); + + // required .WALEntry.WALEdit edit = 2; + boolean hasEdit(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getEdit(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder(); + } + public static final class WALEntry extends + com.google.protobuf.GeneratedMessage + implements WALEntryOrBuilder { + // Use WALEntry.newBuilder() to construct. + private WALEntry(Builder builder) { + super(builder); + } + private WALEntry(boolean noInit) {} + + private static final WALEntry defaultInstance; + public static WALEntry getDefaultInstance() { + return defaultInstance; + } + + public WALEntry getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_fieldAccessorTable; + } + + public interface WALKeyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes encodedRegionName = 1; + boolean hasEncodedRegionName(); + com.google.protobuf.ByteString getEncodedRegionName(); + + // required bytes tableName = 2; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required uint64 logSequenceNumber = 3; + boolean hasLogSequenceNumber(); + long getLogSequenceNumber(); + + // required uint64 writeTime = 4; + boolean hasWriteTime(); + long getWriteTime(); + + // optional .UUID clusterId = 5; + boolean hasClusterId(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getClusterId(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder getClusterIdOrBuilder(); + } + public static final class WALKey extends + com.google.protobuf.GeneratedMessage + implements WALKeyOrBuilder { + // Use WALKey.newBuilder() to construct. 
+ private WALKey(Builder builder) { + super(builder); + } + private WALKey(boolean noInit) {} + + private static final WALKey defaultInstance; + public static WALKey getDefaultInstance() { + return defaultInstance; + } + + public WALKey getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; + } + + private int bitField0_; + // required bytes encodedRegionName = 1; + public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString encodedRegionName_; + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + + // required bytes tableName = 2; + public static final int TABLENAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required uint64 logSequenceNumber = 3; + public static final int LOGSEQUENCENUMBER_FIELD_NUMBER = 3; + private long logSequenceNumber_; + public boolean hasLogSequenceNumber() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getLogSequenceNumber() { + return logSequenceNumber_; + } + + // required uint64 writeTime = 4; + public static final int WRITETIME_FIELD_NUMBER = 4; + private long writeTime_; + public boolean hasWriteTime() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getWriteTime() { + return writeTime_; + } + + // optional .UUID clusterId = 5; + public static final int CLUSTERID_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID clusterId_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getClusterId() { + return clusterId_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { + return clusterId_; + } + + private void initFields() { + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + tableName_ = com.google.protobuf.ByteString.EMPTY; + logSequenceNumber_ = 0L; + writeTime_ = 0L; + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasEncodedRegionName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasLogSequenceNumber()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasWriteTime()) { + memoizedIsInitialized = 0; + return false; + } + if (hasClusterId()) { + if (!getClusterId().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void 
writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, encodedRegionName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, logSequenceNumber_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(4, writeTime_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeMessage(5, clusterId_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, encodedRegionName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, logSequenceNumber_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, writeTime_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, clusterId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey) obj; + + boolean result = true; + result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); + if (hasEncodedRegionName()) { + result = result && getEncodedRegionName() + .equals(other.getEncodedRegionName()); + } + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); + if (hasLogSequenceNumber()) { + result = result && (getLogSequenceNumber() + == other.getLogSequenceNumber()); + } + result = result && (hasWriteTime() == other.hasWriteTime()); + if (hasWriteTime()) { + result = result && (getWriteTime() + == other.getWriteTime()); + } + result = result && (hasClusterId() == other.hasClusterId()); + if (hasClusterId()) { + result = result && getClusterId() + .equals(other.getClusterId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasEncodedRegionName()) { + hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getEncodedRegionName().hashCode(); + } + if (hasTableName()) { + hash = (37 * hash) + 
TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasLogSequenceNumber()) { + hash = (37 * hash) + LOGSEQUENCENUMBER_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLogSequenceNumber()); + } + if (hasWriteTime()) { + hash = (37 * hash) + WRITETIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getWriteTime()); + } + if (hasClusterId()) { + hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; + hash = (53 * hash) + getClusterId().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder 
newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getClusterIdFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + logSequenceNumber_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + writeTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + if (clusterIdBuilder_ == null) { + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); + } else { + clusterIdBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.encodedRegionName_ = encodedRegionName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.logSequenceNumber_ = logSequenceNumber_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.writeTime_ = writeTime_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + if (clusterIdBuilder_ == null) { + result.clusterId_ = clusterId_; + } else { + result.clusterId_ = clusterIdBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance()) return this; + if (other.hasEncodedRegionName()) { + setEncodedRegionName(other.getEncodedRegionName()); + } + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasLogSequenceNumber()) { + setLogSequenceNumber(other.getLogSequenceNumber()); + } + if (other.hasWriteTime()) { + setWriteTime(other.getWriteTime()); + } + if (other.hasClusterId()) { + mergeClusterId(other.getClusterId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasEncodedRegionName()) { + + return false; + } + if (!hasTableName()) { + + return false; + } + if (!hasLogSequenceNumber()) { + + return false; + } + if (!hasWriteTime()) { + + return false; + } + if (hasClusterId()) { + if (!getClusterId().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + encodedRegionName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + tableName_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + logSequenceNumber_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + writeTime_ = 
input.readUInt64(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.newBuilder(); + if (hasClusterId()) { + subBuilder.mergeFrom(getClusterId()); + } + input.readMessage(subBuilder, extensionRegistry); + setClusterId(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes encodedRegionName = 1; + private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + encodedRegionName_ = value; + onChanged(); + return this; + } + public Builder clearEncodedRegionName() { + bitField0_ = (bitField0_ & ~0x00000001); + encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); + onChanged(); + return this; + } + + // required bytes tableName = 2; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000002); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required uint64 logSequenceNumber = 3; + private long logSequenceNumber_ ; + public boolean hasLogSequenceNumber() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getLogSequenceNumber() { + return logSequenceNumber_; + } + public Builder setLogSequenceNumber(long value) { + bitField0_ |= 0x00000004; + logSequenceNumber_ = value; + onChanged(); + return this; + } + public Builder clearLogSequenceNumber() { + bitField0_ = (bitField0_ & ~0x00000004); + logSequenceNumber_ = 0L; + onChanged(); + return this; + } + + // required uint64 writeTime = 4; + private long writeTime_ ; + public boolean hasWriteTime() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getWriteTime() { + return writeTime_; + } + public Builder setWriteTime(long value) { + bitField0_ |= 0x00000008; + writeTime_ = value; + onChanged(); + return this; + } + public Builder clearWriteTime() { + bitField0_ = (bitField0_ & ~0x00000008); + writeTime_ = 0L; + onChanged(); + return this; + } + + // optional .UUID clusterId = 5; + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder> clusterIdBuilder_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID getClusterId() { + if (clusterIdBuilder_ == null) { + return clusterId_; + } else { + return clusterIdBuilder_.getMessage(); + } + } + public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID value) { + if (clusterIdBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + clusterId_ = value; + onChanged(); + } else { + clusterIdBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setClusterId( + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder builderForValue) { + if (clusterIdBuilder_ == null) { + clusterId_ = builderForValue.build(); + onChanged(); + } else { + clusterIdBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID value) { + if (clusterIdBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + clusterId_ != org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance()) { + clusterId_ = + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); + } else { + clusterId_ = value; + } + onChanged(); + } else { + clusterIdBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearClusterId() { + if (clusterIdBuilder_ == null) { + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.getDefaultInstance(); + onChanged(); + } else { + clusterIdBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder getClusterIdBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getClusterIdFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { + if (clusterIdBuilder_ != null) { + return clusterIdBuilder_.getMessageOrBuilder(); + } else { + return clusterId_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder> + getClusterIdFieldBuilder() { + if (clusterIdBuilder_ == null) { + clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUIDOrBuilder>( + clusterId_, + getParentForChildren(), + isClean()); + clusterId_ = null; + } + return clusterIdBuilder_; + } + + // @@protoc_insertion_point(builder_scope:WALEntry.WALKey) + } + + static { + defaultInstance = new WALKey(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALEntry.WALKey) + } + + public interface WALEditOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .KeyValue keyValue = 1; + java.util.List + getKeyValueList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getKeyValue(int index); + int getKeyValueCount(); + java.util.List + getKeyValueOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder 
getKeyValueOrBuilder( + int index); + + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + java.util.List + getFamilyScopeList(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index); + int getFamilyScopeCount(); + java.util.List + getFamilyScopeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + int index); + } + public static final class WALEdit extends + com.google.protobuf.GeneratedMessage + implements WALEditOrBuilder { + // Use WALEdit.newBuilder() to construct. + private WALEdit(Builder builder) { + super(builder); + } + private WALEdit(boolean noInit) {} + + private static final WALEdit defaultInstance; + public static WALEdit getDefaultInstance() { + return defaultInstance; + } + + public WALEdit getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; + } + + public enum ScopeType + implements com.google.protobuf.ProtocolMessageEnum { + REPLICATION_SCOPE_LOCAL(0, 0), + REPLICATION_SCOPE_GLOBAL(1, 1), + ; + + public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0; + public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1; + + + public final int getNumber() { return value; } + + public static ScopeType valueOf(int value) { + switch (value) { + case 0: return REPLICATION_SCOPE_LOCAL; + case 1: return REPLICATION_SCOPE_GLOBAL; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ScopeType findValueByNumber(int number) { + return ScopeType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDescriptor().getEnumTypes().get(0); + } + + private static final ScopeType[] VALUES = { + REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL, + }; + + public static ScopeType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private ScopeType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:WALEntry.WALEdit.ScopeType) + } + + public interface FamilyScopeOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required 
.WALEntry.WALEdit.ScopeType scopeType = 2; + boolean hasScopeType(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType getScopeType(); + } + public static final class FamilyScope extends + com.google.protobuf.GeneratedMessage + implements FamilyScopeOrBuilder { + // Use FamilyScope.newBuilder() to construct. + private FamilyScope(Builder builder) { + super(builder); + } + private FamilyScope(boolean noInit) {} + + private static final FamilyScope defaultInstance; + public static FamilyScope getDefaultInstance() { + return defaultInstance; + } + + public FamilyScope getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required .WALEntry.WALEdit.ScopeType scopeType = 2; + public static final int SCOPETYPE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType scopeType_; + public boolean hasScopeType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { + return scopeType_; + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasScopeType()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, scopeType_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, scopeType_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws 
java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasScopeType() == other.hasScopeType()); + if (hasScopeType()) { + result = result && + (getScopeType() == other.getScopeType()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasScopeType()) { + hash = (37 * hash) + SCOPETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getScopeType()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getDefaultInstanceForType() { + return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.scopeType_ = scopeType_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasScopeType()) { + setScopeType(other.getScopeType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + if (!hasScopeType()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType value = 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + scopeType_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required .WALEntry.WALEdit.ScopeType scopeType = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + public boolean hasScopeType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { + return scopeType_; + } + public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + scopeType_ = value; + onChanged(); + return this; + } + public Builder clearScopeType() { + bitField0_ = (bitField0_ & ~0x00000002); + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit.FamilyScope) + } + + static { + defaultInstance = new FamilyScope(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALEntry.WALEdit.FamilyScope) + } + + // repeated .KeyValue keyValue = 1; + public static final int KEYVALUE_FIELD_NUMBER = 1; + private java.util.List keyValue_; + public java.util.List getKeyValueList() { + return keyValue_; + } + public java.util.List + getKeyValueOrBuilderList() { + return keyValue_; + } + public int getKeyValueCount() { + return keyValue_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getKeyValue(int index) { + return keyValue_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getKeyValueOrBuilder( + int index) { + return keyValue_.get(index); + } + + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + public static final int FAMILYSCOPE_FIELD_NUMBER = 2; + private java.util.List familyScope_; + public java.util.List getFamilyScopeList() { + return familyScope_; + } + public java.util.List + getFamilyScopeOrBuilderList() { + return familyScope_; + } + public int getFamilyScopeCount() { + return familyScope_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { + return familyScope_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( + int index) { 
+ return familyScope_.get(index); + } + + private void initFields() { + keyValue_ = java.util.Collections.emptyList(); + familyScope_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getKeyValueCount(); i++) { + if (!getKeyValue(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getFamilyScopeCount(); i++) { + if (!getFamilyScope(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < keyValue_.size(); i++) { + output.writeMessage(1, keyValue_.get(i)); + } + for (int i = 0; i < familyScope_.size(); i++) { + output.writeMessage(2, familyScope_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < keyValue_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, keyValue_.get(i)); + } + for (int i = 0; i < familyScope_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, familyScope_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit) obj; + + boolean result = true; + result = result && getKeyValueList() + .equals(other.getKeyValueList()); + result = result && getFamilyScopeList() + .equals(other.getFamilyScopeList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getKeyValueCount() > 0) { + hash = (37 * hash) + KEYVALUE_FIELD_NUMBER; + hash = (53 * hash) + getKeyValueList().hashCode(); + } + if (getFamilyScopeCount() > 0) { + hash = (37 * hash) + FAMILYSCOPE_FIELD_NUMBER; + hash = (53 * hash) + getFamilyScopeList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getKeyValueFieldBuilder(); + getFamilyScopeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (keyValueBuilder_ == null) { + keyValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + keyValueBuilder_.clear(); + } + if (familyScopeBuilder_ == null) { + familyScope_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + familyScopeBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit(this); + int from_bitField0_ = bitField0_; + if (keyValueBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + keyValue_ = java.util.Collections.unmodifiableList(keyValue_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.keyValue_ = keyValue_; + } else { + result.keyValue_ = keyValueBuilder_.build(); + } + if (familyScopeBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + familyScope_ = java.util.Collections.unmodifiableList(familyScope_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.familyScope_ = familyScope_; + } else { + result.familyScope_ = familyScopeBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit)other); + } else { + 
super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance()) return this; + if (keyValueBuilder_ == null) { + if (!other.keyValue_.isEmpty()) { + if (keyValue_.isEmpty()) { + keyValue_ = other.keyValue_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureKeyValueIsMutable(); + keyValue_.addAll(other.keyValue_); + } + onChanged(); + } + } else { + if (!other.keyValue_.isEmpty()) { + if (keyValueBuilder_.isEmpty()) { + keyValueBuilder_.dispose(); + keyValueBuilder_ = null; + keyValue_ = other.keyValue_; + bitField0_ = (bitField0_ & ~0x00000001); + keyValueBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getKeyValueFieldBuilder() : null; + } else { + keyValueBuilder_.addAllMessages(other.keyValue_); + } + } + } + if (familyScopeBuilder_ == null) { + if (!other.familyScope_.isEmpty()) { + if (familyScope_.isEmpty()) { + familyScope_ = other.familyScope_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureFamilyScopeIsMutable(); + familyScope_.addAll(other.familyScope_); + } + onChanged(); + } + } else { + if (!other.familyScope_.isEmpty()) { + if (familyScopeBuilder_.isEmpty()) { + familyScopeBuilder_.dispose(); + familyScopeBuilder_ = null; + familyScope_ = other.familyScope_; + bitField0_ = (bitField0_ & ~0x00000002); + familyScopeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getFamilyScopeFieldBuilder() : null; + } else { + familyScopeBuilder_.addAllMessages(other.familyScope_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getKeyValueCount(); i++) { + if (!getKeyValue(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getFamilyScopeCount(); i++) { + if (!getFamilyScope(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addKeyValue(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addFamilyScope(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .KeyValue keyValue = 1; + private java.util.List keyValue_ = + java.util.Collections.emptyList(); + private void ensureKeyValueIsMutable() { + if 
(!((bitField0_ & 0x00000001) == 0x00000001)) { + keyValue_ = new java.util.ArrayList(keyValue_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> keyValueBuilder_; + + public java.util.List getKeyValueList() { + if (keyValueBuilder_ == null) { + return java.util.Collections.unmodifiableList(keyValue_); + } else { + return keyValueBuilder_.getMessageList(); + } + } + public int getKeyValueCount() { + if (keyValueBuilder_ == null) { + return keyValue_.size(); + } else { + return keyValueBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getKeyValue(int index) { + if (keyValueBuilder_ == null) { + return keyValue_.get(index); + } else { + return keyValueBuilder_.getMessage(index); + } + } + public Builder setKeyValue( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { + if (keyValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueIsMutable(); + keyValue_.set(index, value); + onChanged(); + } else { + keyValueBuilder_.setMessage(index, value); + } + return this; + } + public Builder setKeyValue( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { + if (keyValueBuilder_ == null) { + ensureKeyValueIsMutable(); + keyValue_.set(index, builderForValue.build()); + onChanged(); + } else { + keyValueBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addKeyValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { + if (keyValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueIsMutable(); + keyValue_.add(value); + onChanged(); + } else { + keyValueBuilder_.addMessage(value); + } + return this; + } + public Builder addKeyValue( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { + if (keyValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueIsMutable(); + keyValue_.add(index, value); + onChanged(); + } else { + keyValueBuilder_.addMessage(index, value); + } + return this; + } + public Builder addKeyValue( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { + if (keyValueBuilder_ == null) { + ensureKeyValueIsMutable(); + keyValue_.add(builderForValue.build()); + onChanged(); + } else { + keyValueBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addKeyValue( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { + if (keyValueBuilder_ == null) { + ensureKeyValueIsMutable(); + keyValue_.add(index, builderForValue.build()); + onChanged(); + } else { + keyValueBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllKeyValue( + java.lang.Iterable values) { + if (keyValueBuilder_ == null) { + ensureKeyValueIsMutable(); + super.addAll(values, keyValue_); + onChanged(); + } else { + keyValueBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearKeyValue() { + if (keyValueBuilder_ == null) { + keyValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + 
} else { + keyValueBuilder_.clear(); + } + return this; + } + public Builder removeKeyValue(int index) { + if (keyValueBuilder_ == null) { + ensureKeyValueIsMutable(); + keyValue_.remove(index); + onChanged(); + } else { + keyValueBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder getKeyValueBuilder( + int index) { + return getKeyValueFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getKeyValueOrBuilder( + int index) { + if (keyValueBuilder_ == null) { + return keyValue_.get(index); } else { + return keyValueBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getKeyValueOrBuilderList() { + if (keyValueBuilder_ != null) { + return keyValueBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(keyValue_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addKeyValueBuilder() { + return getKeyValueFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addKeyValueBuilder( + int index) { + return getKeyValueFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); + } + public java.util.List + getKeyValueBuilderList() { + return getKeyValueFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> + getKeyValueFieldBuilder() { + if (keyValueBuilder_ == null) { + keyValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder>( + keyValue_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + keyValue_ = null; + } + return keyValueBuilder_; + } + + // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; + private java.util.List familyScope_ = + java.util.Collections.emptyList(); + private void ensureFamilyScopeIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + familyScope_ = new java.util.ArrayList(familyScope_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> familyScopeBuilder_; + + public java.util.List getFamilyScopeList() { + if (familyScopeBuilder_ == null) { + return java.util.Collections.unmodifiableList(familyScope_); + } else { + return familyScopeBuilder_.getMessageList(); + } + } + public int getFamilyScopeCount() { + if (familyScopeBuilder_ == null) { + return familyScope_.size(); + } else { + return familyScopeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { + if (familyScopeBuilder_ == null) { + 
return familyScope_.get(index); + } else { + return familyScopeBuilder_.getMessage(index); + } + } + public Builder setFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope value) { + if (familyScopeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyScopeIsMutable(); + familyScope_.set(index, value); + onChanged(); + } else { + familyScopeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.set(index, builderForValue.build()); + onChanged(); + } else { + familyScopeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addFamilyScope(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope value) { + if (familyScopeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyScopeIsMutable(); + familyScope_.add(value); + onChanged(); + } else { + familyScopeBuilder_.addMessage(value); + } + return this; + } + public Builder addFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope value) { + if (familyScopeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyScopeIsMutable(); + familyScope_.add(index, value); + onChanged(); + } else { + familyScopeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addFamilyScope( + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.add(builderForValue.build()); + onChanged(); + } else { + familyScopeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addFamilyScope( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.add(index, builderForValue.build()); + onChanged(); + } else { + familyScopeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllFamilyScope( + java.lang.Iterable values) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + super.addAll(values, familyScope_); + onChanged(); + } else { + familyScopeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearFamilyScope() { + if (familyScopeBuilder_ == null) { + familyScope_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + familyScopeBuilder_.clear(); + } + return this; + } + public Builder removeFamilyScope(int index) { + if (familyScopeBuilder_ == null) { + ensureFamilyScopeIsMutable(); + familyScope_.remove(index); + onChanged(); + } else { + familyScopeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder getFamilyScopeBuilder( + int index) { + return getFamilyScopeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder 
getFamilyScopeOrBuilder( + int index) { + if (familyScopeBuilder_ == null) { + return familyScope_.get(index); } else { + return familyScopeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getFamilyScopeOrBuilderList() { + if (familyScopeBuilder_ != null) { + return familyScopeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(familyScope_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder() { + return getFamilyScopeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder( + int index) { + return getFamilyScopeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); + } + public java.util.List + getFamilyScopeBuilderList() { + return getFamilyScopeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> + getFamilyScopeFieldBuilder() { + if (familyScopeBuilder_ == null) { + familyScopeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>( + familyScope_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + familyScope_ = null; + } + return familyScopeBuilder_; + } + + // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit) + } + + static { + defaultInstance = new WALEdit(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALEntry.WALEdit) + } + + private int bitField0_; + // required .WALEntry.WALKey walKey = 1; + public static final int WALKEY_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey walKey_; + public boolean hasWalKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getWalKey() { + return walKey_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder() { + return walKey_; + } + + // required .WALEntry.WALEdit edit = 2; + public static final int EDIT_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit edit_; + public boolean hasEdit() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getEdit() { + return edit_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { + return edit_; + } + + private void initFields() { + walKey_ = 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); + edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasWalKey()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasEdit()) { + memoizedIsInitialized = 0; + return false; + } + if (!getWalKey().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getEdit().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, walKey_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, edit_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, walKey_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, edit_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry) obj; + + boolean result = true; + result = result && (hasWalKey() == other.hasWalKey()); + if (hasWalKey()) { + result = result && getWalKey() + .equals(other.getWalKey()); + } + result = result && (hasEdit() == other.hasEdit()); + if (hasEdit()) { + result = result && getEdit() + .equals(other.getEdit()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasWalKey()) { + hash = (37 * hash) + WALKEY_FIELD_NUMBER; + hash = (53 * hash) + getWalKey().hashCode(); + } + if (hasEdit()) { + hash = (37 * hash) + EDIT_FIELD_NUMBER; + hash = (53 * hash) + getEdit().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_WALEntry_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getWalKeyFieldBuilder(); + getEditFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (walKeyBuilder_ == null) { + walKey_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); + } else { + walKeyBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (editBuilder_ == null) { + edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); + } else { + editBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (walKeyBuilder_ == null) { + result.walKey_ = walKey_; + } else { + result.walKey_ = walKeyBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (editBuilder_ == null) { + result.edit_ = edit_; + } else { + result.edit_ = editBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance()) return this; + if (other.hasWalKey()) { + mergeWalKey(other.getWalKey()); + } + if (other.hasEdit()) { + mergeEdit(other.getEdit()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasWalKey()) { + + return false; + } + if (!hasEdit()) { + + return false; + } + if (!getWalKey().isInitialized()) { + + return false; + } + if (!getEdit().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.newBuilder(); + if (hasWalKey()) { + subBuilder.mergeFrom(getWalKey()); + } + input.readMessage(subBuilder, extensionRegistry); + setWalKey(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.newBuilder(); + if (hasEdit()) { + subBuilder.mergeFrom(getEdit()); + } + input.readMessage(subBuilder, extensionRegistry); + setEdit(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .WALEntry.WALKey walKey = 1; + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey walKey_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder> walKeyBuilder_; + public boolean hasWalKey() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey getWalKey() { + if (walKeyBuilder_ == null) { + return walKey_; + } else { + return walKeyBuilder_.getMessage(); + } + } + public Builder setWalKey(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey value) { + if (walKeyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + walKey_ = value; + onChanged(); + } else { + walKeyBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setWalKey( + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder builderForValue) { + if (walKeyBuilder_ == null) { + walKey_ = builderForValue.build(); + onChanged(); + } else { + walKeyBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder 
mergeWalKey(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey value) { + if (walKeyBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + walKey_ != org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance()) { + walKey_ = + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.newBuilder(walKey_).mergeFrom(value).buildPartial(); + } else { + walKey_ = value; + } + onChanged(); + } else { + walKeyBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearWalKey() { + if (walKeyBuilder_ == null) { + walKey_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.getDefaultInstance(); + onChanged(); + } else { + walKeyBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder getWalKeyBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getWalKeyFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder getWalKeyOrBuilder() { + if (walKeyBuilder_ != null) { + return walKeyBuilder_.getMessageOrBuilder(); + } else { + return walKey_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder> + getWalKeyFieldBuilder() { + if (walKeyBuilder_ == null) { + walKeyBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKeyOrBuilder>( + walKey_, + getParentForChildren(), + isClean()); + walKey_ = null; + } + return walKeyBuilder_; + } + + // required .WALEntry.WALEdit edit = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder> editBuilder_; + public boolean hasEdit() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit getEdit() { + if (editBuilder_ == null) { + return edit_; + } else { + return editBuilder_.getMessage(); + } + } + public Builder setEdit(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit value) { + if (editBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + edit_ = value; + onChanged(); + } else { + editBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setEdit( + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder builderForValue) { + if (editBuilder_ == null) { + edit_ = builderForValue.build(); + onChanged(); + } else { + 
editBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit value) { + if (editBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + edit_ != org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance()) { + edit_ = + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.newBuilder(edit_).mergeFrom(value).buildPartial(); + } else { + edit_ = value; + } + onChanged(); + } else { + editBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearEdit() { + if (editBuilder_ == null) { + edit_ = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.getDefaultInstance(); + onChanged(); + } else { + editBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder getEditBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getEditFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { + if (editBuilder_ != null) { + return editBuilder_.getMessageOrBuilder(); + } else { + return edit_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder> + getEditFieldBuilder() { + if (editBuilder_ == null) { + editBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEditOrBuilder>( + edit_, + getParentForChildren(), + isClean()); + edit_ = null; + } + return editBuilder_; + } + + // @@protoc_insertion_point(builder_scope:WALEntry) + } + + static { + defaultInstance = new WALEntry(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALEntry) + } + + public interface ReplicateWALEntryRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .WALEntry walEntry = 1; + java.util.List + getWalEntryList(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getWalEntry(int index); + int getWalEntryCount(); + java.util.List + getWalEntryOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( + int index); + } + public static final class ReplicateWALEntryRequest extends + com.google.protobuf.GeneratedMessage + implements ReplicateWALEntryRequestOrBuilder { + // Use ReplicateWALEntryRequest.newBuilder() to construct. 
+ private ReplicateWALEntryRequest(Builder builder) { + super(builder); + } + private ReplicateWALEntryRequest(boolean noInit) {} + + private static final ReplicateWALEntryRequest defaultInstance; + public static ReplicateWALEntryRequest getDefaultInstance() { + return defaultInstance; + } + + public ReplicateWALEntryRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + } + + // repeated .WALEntry walEntry = 1; + public static final int WALENTRY_FIELD_NUMBER = 1; + private java.util.List walEntry_; + public java.util.List getWalEntryList() { + return walEntry_; + } + public java.util.List + getWalEntryOrBuilderList() { + return walEntry_; + } + public int getWalEntryCount() { + return walEntry_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getWalEntry(int index) { + return walEntry_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( + int index) { + return walEntry_.get(index); + } + + private void initFields() { + walEntry_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getWalEntryCount(); i++) { + if (!getWalEntry(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < walEntry_.size(); i++) { + output.writeMessage(1, walEntry_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < walEntry_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, walEntry_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest) obj; + + boolean result = true; + result = result && getWalEntryList() + .equals(other.getWalEntryList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + 
getDescriptorForType().hashCode(); + if (getWalEntryCount() > 0) { + hash = (37 * hash) + WALENTRY_FIELD_NUMBER; + hash = (53 * hash) + getWalEntryList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest 
prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getWalEntryFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (walEntryBuilder_ == null) { + walEntry_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + walEntryBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest(this); + int from_bitField0_ = bitField0_; + if (walEntryBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + walEntry_ = java.util.Collections.unmodifiableList(walEntry_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.walEntry_ = 
walEntry_; + } else { + result.walEntry_ = walEntryBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance()) return this; + if (walEntryBuilder_ == null) { + if (!other.walEntry_.isEmpty()) { + if (walEntry_.isEmpty()) { + walEntry_ = other.walEntry_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureWalEntryIsMutable(); + walEntry_.addAll(other.walEntry_); + } + onChanged(); + } + } else { + if (!other.walEntry_.isEmpty()) { + if (walEntryBuilder_.isEmpty()) { + walEntryBuilder_.dispose(); + walEntryBuilder_ = null; + walEntry_ = other.walEntry_; + bitField0_ = (bitField0_ & ~0x00000001); + walEntryBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getWalEntryFieldBuilder() : null; + } else { + walEntryBuilder_.addAllMessages(other.walEntry_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getWalEntryCount(); i++) { + if (!getWalEntry(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addWalEntry(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .WALEntry walEntry = 1; + private java.util.List walEntry_ = + java.util.Collections.emptyList(); + private void ensureWalEntryIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + walEntry_ = new java.util.ArrayList(walEntry_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder> walEntryBuilder_; + + public java.util.List getWalEntryList() { + if (walEntryBuilder_ == null) { + return java.util.Collections.unmodifiableList(walEntry_); + } else { + return walEntryBuilder_.getMessageList(); + } + } + public int getWalEntryCount() { + if (walEntryBuilder_ == null) { + return walEntry_.size(); + } else { + return 
walEntryBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry getWalEntry(int index) { + if (walEntryBuilder_ == null) { + return walEntry_.get(index); + } else { + return walEntryBuilder_.getMessage(index); + } + } + public Builder setWalEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry value) { + if (walEntryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureWalEntryIsMutable(); + walEntry_.set(index, value); + onChanged(); + } else { + walEntryBuilder_.setMessage(index, value); + } + return this; + } + public Builder setWalEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder builderForValue) { + if (walEntryBuilder_ == null) { + ensureWalEntryIsMutable(); + walEntry_.set(index, builderForValue.build()); + onChanged(); + } else { + walEntryBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addWalEntry(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry value) { + if (walEntryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureWalEntryIsMutable(); + walEntry_.add(value); + onChanged(); + } else { + walEntryBuilder_.addMessage(value); + } + return this; + } + public Builder addWalEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry value) { + if (walEntryBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureWalEntryIsMutable(); + walEntry_.add(index, value); + onChanged(); + } else { + walEntryBuilder_.addMessage(index, value); + } + return this; + } + public Builder addWalEntry( + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder builderForValue) { + if (walEntryBuilder_ == null) { + ensureWalEntryIsMutable(); + walEntry_.add(builderForValue.build()); + onChanged(); + } else { + walEntryBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addWalEntry( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder builderForValue) { + if (walEntryBuilder_ == null) { + ensureWalEntryIsMutable(); + walEntry_.add(index, builderForValue.build()); + onChanged(); + } else { + walEntryBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllWalEntry( + java.lang.Iterable values) { + if (walEntryBuilder_ == null) { + ensureWalEntryIsMutable(); + super.addAll(values, walEntry_); + onChanged(); + } else { + walEntryBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearWalEntry() { + if (walEntryBuilder_ == null) { + walEntry_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + walEntryBuilder_.clear(); + } + return this; + } + public Builder removeWalEntry(int index) { + if (walEntryBuilder_ == null) { + ensureWalEntryIsMutable(); + walEntry_.remove(index); + onChanged(); + } else { + walEntryBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder getWalEntryBuilder( + int index) { + return getWalEntryFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder getWalEntryOrBuilder( + int index) { + if (walEntryBuilder_ == null) { + return walEntry_.get(index); } else { + return 
walEntryBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getWalEntryOrBuilderList() { + if (walEntryBuilder_ != null) { + return walEntryBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(walEntry_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder addWalEntryBuilder() { + return getWalEntryFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder addWalEntryBuilder( + int index) { + return getWalEntryFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.getDefaultInstance()); + } + public java.util.List + getWalEntryBuilderList() { + return getWalEntryFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder> + getWalEntryFieldBuilder() { + if (walEntryBuilder_ == null) { + walEntryBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntryOrBuilder>( + walEntry_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + walEntry_ = null; + } + return walEntryBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ReplicateWALEntryRequest) + } + + static { + defaultInstance = new ReplicateWALEntryRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ReplicateWALEntryRequest) + } + + public interface ReplicateWALEntryResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class ReplicateWALEntryResponse extends + com.google.protobuf.GeneratedMessage + implements ReplicateWALEntryResponseOrBuilder { + // Use ReplicateWALEntryResponse.newBuilder() to construct. 
+ private ReplicateWALEntryResponse(Builder builder) { + super(builder); + } + private ReplicateWALEntryResponse(boolean noInit) {} + + private static final ReplicateWALEntryResponse defaultInstance; + public static ReplicateWALEntryResponse getDefaultInstance() { + return defaultInstance; + } + + public ReplicateWALEntryResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + 
maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:ReplicateWALEntryResponse) + } + + static { + defaultInstance = new ReplicateWALEntryResponse(true); + defaultInstance.initFields(); + } + + // 
@@protoc_insertion_point(class_scope:ReplicateWALEntryResponse) + } + + public interface RollWALWriterRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class RollWALWriterRequest extends + com.google.protobuf.GeneratedMessage + implements RollWALWriterRequestOrBuilder { + // Use RollWALWriterRequest.newBuilder() to construct. + private RollWALWriterRequest(Builder builder) { + super(builder); + } + private RollWALWriterRequest(boolean noInit) {} + + private static final RollWALWriterRequest defaultInstance; + public static RollWALWriterRequest getDefaultInstance() { + return defaultInstance; + } + + public RollWALWriterRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + 
this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:RollWALWriterRequest) + } + + static { + defaultInstance = new RollWALWriterRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RollWALWriterRequest) + } + + public interface RollWALWriterResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated bytes regionToFlush = 1; + java.util.List getRegionToFlushList(); + int getRegionToFlushCount(); + com.google.protobuf.ByteString getRegionToFlush(int index); + } + public static final class RollWALWriterResponse extends + com.google.protobuf.GeneratedMessage + implements RollWALWriterResponseOrBuilder { + // Use RollWALWriterResponse.newBuilder() to construct. + private RollWALWriterResponse(Builder builder) { + super(builder); + } + private RollWALWriterResponse(boolean noInit) {} + + private static final RollWALWriterResponse defaultInstance; + public static RollWALWriterResponse getDefaultInstance() { + return defaultInstance; + } + + public RollWALWriterResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; + } + + // repeated bytes regionToFlush = 1; + public static final int REGIONTOFLUSH_FIELD_NUMBER = 1; + private java.util.List regionToFlush_; + public java.util.List + getRegionToFlushList() { + return regionToFlush_; + } + public int getRegionToFlushCount() { + return regionToFlush_.size(); + } + public com.google.protobuf.ByteString getRegionToFlush(int index) { + return regionToFlush_.get(index); + } + + private void initFields() { + regionToFlush_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < regionToFlush_.size(); i++) { + output.writeBytes(1, regionToFlush_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + { + int dataSize = 0; + for (int i = 0; i < regionToFlush_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(regionToFlush_.get(i)); + } + size += dataSize; + size += 1 * getRegionToFlushList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse) obj; + + boolean result = true; + result = result && getRegionToFlushList() + .equals(other.getRegionToFlushList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRegionToFlushCount() > 0) { + hash = (37 * hash) + REGIONTOFLUSH_FIELD_NUMBER; + hash = (53 * hash) + getRegionToFlushList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); 
+ } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_RollWALWriterResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + regionToFlush_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse buildPartial() { + 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse(this); + int from_bitField0_ = bitField0_; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + regionToFlush_ = java.util.Collections.unmodifiableList(regionToFlush_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.regionToFlush_ = regionToFlush_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance()) return this; + if (!other.regionToFlush_.isEmpty()) { + if (regionToFlush_.isEmpty()) { + regionToFlush_ = other.regionToFlush_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRegionToFlushIsMutable(); + regionToFlush_.addAll(other.regionToFlush_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + ensureRegionToFlushIsMutable(); + regionToFlush_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // repeated bytes regionToFlush = 1; + private java.util.List regionToFlush_ = java.util.Collections.emptyList();; + private void ensureRegionToFlushIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + regionToFlush_ = new java.util.ArrayList(regionToFlush_); + bitField0_ |= 0x00000001; + } + } + public java.util.List + getRegionToFlushList() { + return java.util.Collections.unmodifiableList(regionToFlush_); + } + public int getRegionToFlushCount() { + return regionToFlush_.size(); + } + public com.google.protobuf.ByteString getRegionToFlush(int index) { + return regionToFlush_.get(index); + } + public Builder setRegionToFlush( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionToFlushIsMutable(); + regionToFlush_.set(index, value); + onChanged(); + return this; + } + public Builder addRegionToFlush(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRegionToFlushIsMutable(); + regionToFlush_.add(value); + onChanged(); + return this; + } + public Builder addAllRegionToFlush( + java.lang.Iterable values) { + ensureRegionToFlushIsMutable(); + super.addAll(values, regionToFlush_); + onChanged(); + return this; + } + 
public Builder clearRegionToFlush() { + regionToFlush_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:RollWALWriterResponse) + } + + static { + defaultInstance = new RollWALWriterResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:RollWALWriterResponse) + } + + public interface StopServerRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string reason = 1; + boolean hasReason(); + String getReason(); + } + public static final class StopServerRequest extends + com.google.protobuf.GeneratedMessage + implements StopServerRequestOrBuilder { + // Use StopServerRequest.newBuilder() to construct. + private StopServerRequest(Builder builder) { + super(builder); + } + private StopServerRequest(boolean noInit) {} + + private static final StopServerRequest defaultInstance; + public static StopServerRequest getDefaultInstance() { + return defaultInstance; + } + + public StopServerRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_fieldAccessorTable; + } + + private int bitField0_; + // required string reason = 1; + public static final int REASON_FIELD_NUMBER = 1; + private java.lang.Object reason_; + public boolean hasReason() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getReason() { + java.lang.Object ref = reason_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + reason_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getReasonBytes() { + java.lang.Object ref = reason_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + reason_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + reason_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasReason()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getReasonBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getReasonBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + 
@java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest) obj; + + boolean result = true; + result = result && (hasReason() == other.hasReason()); + if (hasReason()) { + result = result && getReason() + .equals(other.getReason()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasReason()) { + hash = (37 * hash) + REASON_FIELD_NUMBER; + hash = (53 * hash) + getReason().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + 
return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + reason_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + 
result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.reason_ = reason_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance()) return this; + if (other.hasReason()) { + setReason(other.getReason()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasReason()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + reason_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string reason = 1; + private java.lang.Object reason_ = ""; + public boolean hasReason() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getReason() { + java.lang.Object ref = reason_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + reason_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setReason(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + reason_ = value; + onChanged(); + return this; + } + public Builder clearReason() { + bitField0_ = (bitField0_ & ~0x00000001); + reason_ = getDefaultInstance().getReason(); + onChanged(); + return this; + } + void setReason(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + reason_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:StopServerRequest) + } + + static { + defaultInstance = new StopServerRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:StopServerRequest) + } + + public interface StopServerResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class StopServerResponse extends + com.google.protobuf.GeneratedMessage + implements StopServerResponseOrBuilder { + // Use 
StopServerResponse.newBuilder() to construct. + private StopServerResponse(Builder builder) { + super(builder); + } + private StopServerResponse(boolean noInit) {} + + private static final StopServerResponse defaultInstance; + public static StopServerResponse getDefaultInstance() { + return defaultInstance; + } + + public StopServerResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_StopServerResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + 
private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:StopServerResponse) + } + + static { + defaultInstance = new StopServerResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:StopServerResponse) + } + + public interface GetServerInfoRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class GetServerInfoRequest extends + com.google.protobuf.GeneratedMessage + implements GetServerInfoRequestOrBuilder 
{ + // Use GetServerInfoRequest.newBuilder() to construct. + private GetServerInfoRequest(Builder builder) { + super(builder); + } + private GetServerInfoRequest(boolean noInit) {} + + private static final GetServerInfoRequest defaultInstance; + public static GetServerInfoRequest getDefaultInstance() { + return defaultInstance; + } + + public GetServerInfoRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + byte[] data, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void 
maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:GetServerInfoRequest) + } + + static { + defaultInstance = new GetServerInfoRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetServerInfoRequest) + } + + public interface GetServerInfoResponseOrBuilder + extends 
com.google.protobuf.MessageOrBuilder { + + // required .ServerName serverName = 1; + boolean hasServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); + } + public static final class GetServerInfoResponse extends + com.google.protobuf.GeneratedMessage + implements GetServerInfoResponseOrBuilder { + // Use GetServerInfoResponse.newBuilder() to construct. + private GetServerInfoResponse(Builder builder) { + super(builder); + } + private GetServerInfoResponse(boolean noInit) {} + + private static final GetServerInfoResponse defaultInstance; + public static GetServerInfoResponse getDefaultInstance() { + return defaultInstance; + } + + public GetServerInfoResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; + } + + private int bitField0_; + // required .ServerName serverName = 1; + public static final int SERVERNAME_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_; + public boolean hasServerName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { + return serverName_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + return serverName_; + } + + private void initFields() { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasServerName()) { + memoizedIsInitialized = 0; + return false; + } + if (!getServerName().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, serverName_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, serverName_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse) obj; + + boolean result = true; + result = result && (hasServerName() == other.hasServerName()); + if (hasServerName()) { + result = result && getServerName() + .equals(other.getServerName()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasServerName()) { + hash = (37 * hash) + SERVERNAME_FIELD_NUMBER; + hash = (53 * hash) + getServerName().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.internal_static_GetServerInfoResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getServerNameFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (serverNameBuilder_ == null) { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + 
} + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (serverNameBuilder_ == null) { + result.serverName_ = serverName_; + } else { + result.serverName_ = serverNameBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance()) return this; + if (other.hasServerName()) { + mergeServerName(other.getServerName()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasServerName()) { + + return false; + } + if (!getServerName().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(); + if (hasServerName()) { + subBuilder.mergeFrom(getServerName()); + } + input.readMessage(subBuilder, extensionRegistry); + setServerName(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .ServerName serverName = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; + public boolean hasServerName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() { + if (serverNameBuilder_ == null) { + return serverName_; + } else { + return serverNameBuilder_.getMessage(); + } + } + public Builder 
setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + serverName_ = value; + onChanged(); + } else { + serverNameBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setServerName( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverNameBuilder_ == null) { + serverName_ = builderForValue.build(); + onChanged(); + } else { + serverNameBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + serverName_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); + } else { + serverName_ = value; + } + onChanged(); + } else { + serverNameBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearServerName() { + if (serverNameBuilder_ == null) { + serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); + onChanged(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getServerNameFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + if (serverNameBuilder_ != null) { + return serverNameBuilder_.getMessageOrBuilder(); + } else { + return serverName_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerNameFieldBuilder() { + if (serverNameBuilder_ == null) { + serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + serverName_, + getParentForChildren(), + isClean()); + serverName_ = null; + } + return serverNameBuilder_; + } + + // @@protoc_insertion_point(builder_scope:GetServerInfoResponse) + } + + static { + defaultInstance = new GetServerInfoResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetServerInfoResponse) + } + + public static abstract class RegionAdminService + implements com.google.protobuf.Service { + protected RegionAdminService() {} + + public interface Interface { + public abstract void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getStoreFileList( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done); + + } + + public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new RegionAdminService() { + @java.lang.Override + public void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done) { + impl.getRegionInfo(controller, request, done); + } + + @java.lang.Override + public void getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done) { + impl.getStoreFileList(controller, request, done); + } + + @java.lang.Override + public void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.getOnlineRegion(controller, request, done); + } + + @java.lang.Override + public void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, + 
com.google.protobuf.RpcCallback done) { + impl.openRegion(controller, request, done); + } + + @java.lang.Override + public void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.closeRegion(controller, request, done); + } + + @java.lang.Override + public void flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.flushRegion(controller, request, done); + } + + @java.lang.Override + public void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.splitRegion(controller, request, done); + } + + @java.lang.Override + public void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done) { + impl.compactRegion(controller, request, done); + } + + @java.lang.Override + public void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done) { + impl.replicateWALEntry(controller, request, done); + } + + @java.lang.Override + public void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done) { + impl.rollWALWriter(controller, request, done); + } + + @java.lang.Override + public void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done) { + impl.getServerInfo(controller, request, done); + } + + @java.lang.Override + public void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done) { + impl.stopServer(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)request); + case 1: + return impl.getStoreFileList(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)request); + case 2: + return impl.getOnlineRegion(controller, 
(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)request); + case 3: + return impl.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)request); + case 4: + return impl.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)request); + case 5: + return impl.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)request); + case 6: + return impl.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)request); + case 7: + return impl.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)request); + case 8: + return impl.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)request); + case 9: + return impl.rollWALWriter(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)request); + case 10: + return impl.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)request); + case 11: + return impl.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new 
java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void replicateWALEntry( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.getRegionInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.getStoreFileList(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.getOnlineRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.openRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 4: + this.closeRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 5: + this.flushRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 6: + this.splitRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 7: + this.compactRegion(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 8: + this.replicateWALEntry(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 9: + this.rollWALWriter(controller, 
(org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 10: + this.getServerInfo(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 11: + this.stopServer(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance(); + case 5: + return 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance(); + case 8: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(); + case 9: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(); + case 10: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(); + case 11: + return org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RegionAdminService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance())); + } + + public void getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance())); + } + + public void getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance())); + } + + public void openRegion( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance())); + } + + public void closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance())); + } + + public void flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance())); + } + + public void splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance())); + } + + public void compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance())); + } + + public void replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(8), + controller, + request, + 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance())); + } + + public void rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(9), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance())); + } + + public void getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(10), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance())); + } + + public void stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(11), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request) + throws com.google.protobuf.ServiceException; + + public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse getRegionInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.getDefaultInstance()); + } + + + public 
org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse getStoreFileList( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse getOnlineRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse openRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse closeRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse flushRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse splitRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.getDefaultInstance()); + } 
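      /*
       * Illustrative sketch, not part of the protoc output: how client code might drive the
       * blocking stub generated in this class. The BlockingRpcChannel and RpcController
       * instances ("channel", "controller") are assumptions -- this patch only defines the
       * protobuf service classes; the concrete RPC transport backing the channel is wired up
       * elsewhere and is hypothetical here.
       *
       *   RegionAdminService.BlockingInterface admin =
       *       RegionAdminService.newBlockingStub(channel);
       *   GetServerInfoResponse info = admin.getServerInfo(
       *       controller, GetServerInfoRequest.getDefaultInstance());
       *   if (info.hasServerName()) {
       *     HBaseProtos.ServerName serverName = info.getServerName();
       *   }
       */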
+ + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse compactRegion( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse replicateWALEntry( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(8), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse rollWALWriter( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(9), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse getServerInfo( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(10), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse stopServer( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(11), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetRegionInfoRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetRegionInfoRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetRegionInfoResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetRegionInfoResponse_fieldAccessorTable; + private static 
com.google.protobuf.Descriptors.Descriptor + internal_static_GetStoreFileListRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetStoreFileListRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetStoreFileListResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetStoreFileListResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetOnlineRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetOnlineRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetOnlineRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetOnlineRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_OpenRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OpenRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_OpenRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_OpenRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CloseRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CloseRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CloseRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CloseRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FlushRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FlushRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FlushRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FlushRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SplitRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SplitRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_SplitRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_SplitRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CompactRegionRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CompactRegionRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CompactRegionResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CompactRegionResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UUID_descriptor; + private static + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UUID_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_WALKey_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_WALKey_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_WALEdit_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_WALEdit_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALEntry_WALEdit_FamilyScope_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReplicateWALEntryRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReplicateWALEntryRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ReplicateWALEntryResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ReplicateWALEntryResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RollWALWriterRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RollWALWriterRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_RollWALWriterResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_RollWALWriterResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_StopServerRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_StopServerRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_StopServerResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_StopServerResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetServerInfoRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetServerInfoRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetServerInfoResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetServerInfoResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\021RegionAdmin.proto\032\013hbase.proto\"8\n\024GetR" + + "egionInfoRequest\022 \n\006region\030\001 \002(\0132\020.Regio" + + "nSpecifier\"8\n\025GetRegionInfoResponse\022\037\n\nr" + + "egionInfo\030\001 \002(\0132\013.RegionInfo\"Q\n\027GetStore" + + "FileListRequest\022 
\n\006region\030\001 \002(\0132\020.Region" + + "Specifier\022\024\n\014columnFamily\030\002 \003(\014\"-\n\030GetSt" + + "oreFileListResponse\022\021\n\tstoreFile\030\001 \003(\t\"\030" + + "\n\026GetOnlineRegionRequest\":\n\027GetOnlineReg" + + "ionResponse\022\037\n\nregionInfo\030\001 \003(\0132\013.Region" + + "Info\"S\n\021OpenRegionRequest\022 \n\006region\030\001 \003(", + "\0132\020.RegionSpecifier\022\034\n\024versionOfOfflineN" + + "ode\030\002 \001(\r\"\234\001\n\022OpenRegionResponse\022<\n\014open" + + "ingState\030\001 \003(\0162&.OpenRegionResponse.Regi" + + "onOpeningState\"H\n\022RegionOpeningState\022\n\n\006" + + "OPENED\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022\n\016FAILED_O" + + "PENING\020\002\"r\n\022CloseRegionRequest\022 \n\006region" + + "\030\001 \002(\0132\020.RegionSpecifier\022\034\n\024versionOfClo" + + "singNode\030\002 \001(\r\022\034\n\016transitionInZK\030\003 \001(\010:\004" + + "true\"%\n\023CloseRegionResponse\022\016\n\006closed\030\001 " + + "\002(\010\"M\n\022FlushRegionRequest\022 \n\006region\030\001 \002(", + "\0132\020.RegionSpecifier\022\025\n\rifOlderThanTs\030\002 \001" + + "(\004\"=\n\023FlushRegionResponse\022\025\n\rlastFlushTi" + + "me\030\001 \002(\004\022\017\n\007flushed\030\002 \001(\010\"J\n\022SplitRegion" + + "Request\022 \n\006region\030\001 \002(\0132\020.RegionSpecifie" + + "r\022\022\n\nsplitPoint\030\002 \001(\014\"\025\n\023SplitRegionResp" + + "onse\"G\n\024CompactRegionRequest\022 \n\006region\030\001" + + " \002(\0132\020.RegionSpecifier\022\r\n\005major\030\002 \001(\010\"\027\n" + + "\025CompactRegionResponse\"1\n\004UUID\022\024\n\014leastS" + + "igBits\030\001 \002(\004\022\023\n\013mostSigBits\030\002 \002(\004\"\301\003\n\010WA" + + "LEntry\022 \n\006walKey\030\001 \002(\0132\020.WALEntry.WALKey", + "\022\037\n\004edit\030\002 \002(\0132\021.WALEntry.WALEdit\032~\n\006WAL" + + "Key\022\031\n\021encodedRegionName\030\001 \002(\014\022\021\n\ttableN" + + "ame\030\002 \002(\014\022\031\n\021logSequenceNumber\030\003 \002(\004\022\021\n\t" + + "writeTime\030\004 \002(\004\022\030\n\tclusterId\030\005 \001(\0132\005.UUI" + + "D\032\361\001\n\007WALEdit\022\033\n\010keyValue\030\001 \003(\0132\t.KeyVal" + + "ue\0222\n\013familyScope\030\002 \003(\0132\035.WALEntry.WALEd" + + "it.FamilyScope\032M\n\013FamilyScope\022\016\n\006family\030" + + "\001 \002(\014\022.\n\tscopeType\030\002 \002(\0162\033.WALEntry.WALE" + + "dit.ScopeType\"F\n\tScopeType\022\033\n\027REPLICATIO" + + "N_SCOPE_LOCAL\020\000\022\034\n\030REPLICATION_SCOPE_GLO", + "BAL\020\001\"7\n\030ReplicateWALEntryRequest\022\033\n\010wal" + + "Entry\030\001 \003(\0132\t.WALEntry\"\033\n\031ReplicateWALEn" + + "tryResponse\"\026\n\024RollWALWriterRequest\".\n\025R" + + "ollWALWriterResponse\022\025\n\rregionToFlush\030\001 " + + "\003(\014\"#\n\021StopServerRequest\022\016\n\006reason\030\001 \002(\t" + + "\"\024\n\022StopServerResponse\"\026\n\024GetServerInfoR" + + "equest\"8\n\025GetServerInfoResponse\022\037\n\nserve" + + "rName\030\001 \002(\0132\013.ServerName2\213\006\n\022RegionAdmin" + + "Service\022>\n\rgetRegionInfo\022\025.GetRegionInfo" + + "Request\032\026.GetRegionInfoResponse\022G\n\020getSt", + "oreFileList\022\030.GetStoreFileListRequest\032\031." 
+ + "GetStoreFileListResponse\022D\n\017getOnlineReg" + + "ion\022\027.GetOnlineRegionRequest\032\030.GetOnline" + + "RegionResponse\0225\n\nopenRegion\022\022.OpenRegio" + + "nRequest\032\023.OpenRegionResponse\0228\n\013closeRe" + + "gion\022\023.CloseRegionRequest\032\024.CloseRegionR" + + "esponse\0228\n\013flushRegion\022\023.FlushRegionRequ" + + "est\032\024.FlushRegionResponse\0228\n\013splitRegion" + + "\022\023.SplitRegionRequest\032\024.SplitRegionRespo" + + "nse\022>\n\rcompactRegion\022\025.CompactRegionRequ", + "est\032\026.CompactRegionResponse\022J\n\021replicate" + + "WALEntry\022\031.ReplicateWALEntryRequest\032\032.Re" + + "plicateWALEntryResponse\022>\n\rrollWALWriter" + + "\022\025.RollWALWriterRequest\032\026.RollWALWriterR" + + "esponse\022>\n\rgetServerInfo\022\025.GetServerInfo" + + "Request\032\026.GetServerInfoResponse\0225\n\nstopS" + + "erver\022\022.StopServerRequest\032\023.StopServerRe" + + "sponseBG\n*org.apache.hadoop.hbase.protob" + + "uf.generatedB\021RegionAdminProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_GetRegionInfoRequest_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_GetRegionInfoRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetRegionInfoRequest_descriptor, + new java.lang.String[] { "Region", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoRequest.Builder.class); + internal_static_GetRegionInfoResponse_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_GetRegionInfoResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetRegionInfoResponse_descriptor, + new java.lang.String[] { "RegionInfo", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetRegionInfoResponse.Builder.class); + internal_static_GetStoreFileListRequest_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_GetStoreFileListRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetStoreFileListRequest_descriptor, + new java.lang.String[] { "Region", "ColumnFamily", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListRequest.Builder.class); + internal_static_GetStoreFileListResponse_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_GetStoreFileListResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetStoreFileListResponse_descriptor, + new java.lang.String[] { "StoreFile", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetStoreFileListResponse.Builder.class); + internal_static_GetOnlineRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + 
internal_static_GetOnlineRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetOnlineRegionRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionRequest.Builder.class); + internal_static_GetOnlineRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_GetOnlineRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetOnlineRegionResponse_descriptor, + new java.lang.String[] { "RegionInfo", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetOnlineRegionResponse.Builder.class); + internal_static_OpenRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_OpenRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OpenRegionRequest_descriptor, + new java.lang.String[] { "Region", "VersionOfOfflineNode", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionRequest.Builder.class); + internal_static_OpenRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_OpenRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_OpenRegionResponse_descriptor, + new java.lang.String[] { "OpeningState", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.OpenRegionResponse.Builder.class); + internal_static_CloseRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_CloseRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CloseRegionRequest_descriptor, + new java.lang.String[] { "Region", "VersionOfClosingNode", "TransitionInZK", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionRequest.Builder.class); + internal_static_CloseRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_CloseRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CloseRegionResponse_descriptor, + new java.lang.String[] { "Closed", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CloseRegionResponse.Builder.class); + internal_static_FlushRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_FlushRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FlushRegionRequest_descriptor, + new java.lang.String[] { "Region", "IfOlderThanTs", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest.Builder.class); + internal_static_FlushRegionResponse_descriptor = + 
getDescriptor().getMessageTypes().get(11); + internal_static_FlushRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FlushRegionResponse_descriptor, + new java.lang.String[] { "LastFlushTime", "Flushed", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse.Builder.class); + internal_static_SplitRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_SplitRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SplitRegionRequest_descriptor, + new java.lang.String[] { "Region", "SplitPoint", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionRequest.Builder.class); + internal_static_SplitRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_SplitRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_SplitRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.SplitRegionResponse.Builder.class); + internal_static_CompactRegionRequest_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_CompactRegionRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CompactRegionRequest_descriptor, + new java.lang.String[] { "Region", "Major", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionRequest.Builder.class); + internal_static_CompactRegionResponse_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_CompactRegionResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CompactRegionResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.CompactRegionResponse.Builder.class); + internal_static_UUID_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_UUID_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UUID_descriptor, + new java.lang.String[] { "LeastSigBits", "MostSigBits", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.UUID.Builder.class); + internal_static_WALEntry_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_WALEntry_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_descriptor, + new java.lang.String[] { "WalKey", "Edit", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.Builder.class); + internal_static_WALEntry_WALKey_descriptor = + internal_static_WALEntry_descriptor.getNestedTypes().get(0); + internal_static_WALEntry_WALKey_fieldAccessorTable = new + 
com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_WALKey_descriptor, + new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALKey.Builder.class); + internal_static_WALEntry_WALEdit_descriptor = + internal_static_WALEntry_descriptor.getNestedTypes().get(1); + internal_static_WALEntry_WALEdit_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_WALEdit_descriptor, + new java.lang.String[] { "KeyValue", "FamilyScope", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.Builder.class); + internal_static_WALEntry_WALEdit_FamilyScope_descriptor = + internal_static_WALEntry_WALEdit_descriptor.getNestedTypes().get(0); + internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALEntry_WALEdit_FamilyScope_descriptor, + new java.lang.String[] { "Family", "ScopeType", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); + internal_static_ReplicateWALEntryRequest_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_ReplicateWALEntryRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReplicateWALEntryRequest_descriptor, + new java.lang.String[] { "WalEntry", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryRequest.Builder.class); + internal_static_ReplicateWALEntryResponse_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_ReplicateWALEntryResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ReplicateWALEntryResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.ReplicateWALEntryResponse.Builder.class); + internal_static_RollWALWriterRequest_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_RollWALWriterRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RollWALWriterRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterRequest.Builder.class); + internal_static_RollWALWriterResponse_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_RollWALWriterResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_RollWALWriterResponse_descriptor, + new java.lang.String[] { "RegionToFlush", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.RollWALWriterResponse.Builder.class); + 
internal_static_StopServerRequest_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_StopServerRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_StopServerRequest_descriptor, + new java.lang.String[] { "Reason", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerRequest.Builder.class); + internal_static_StopServerResponse_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_StopServerResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_StopServerResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.StopServerResponse.Builder.class); + internal_static_GetServerInfoRequest_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_GetServerInfoRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetServerInfoRequest_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoRequest.Builder.class); + internal_static_GetServerInfoResponse_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_GetServerInfoResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetServerInfoResponse_descriptor, + new java.lang.String[] { "ServerName", }, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.GetServerInfoResponse.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionClientProtos.java b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionClientProtos.java new file mode 100644 index 00000000000..b36a9c07ba0 --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/generated/RegionClientProtos.java @@ -0,0 +1,21773 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: RegionClient.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class RegionClientProtos { + private RegionClientProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface ColumnOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // repeated bytes qualifier = 2; + java.util.List getQualifierList(); + int getQualifierCount(); + com.google.protobuf.ByteString getQualifier(int index); + } + public static final class Column extends + com.google.protobuf.GeneratedMessage + implements ColumnOrBuilder { + // Use Column.newBuilder() to construct. 
+ private Column(Builder builder) { + super(builder); + } + private Column(boolean noInit) {} + + private static final Column defaultInstance; + public static Column getDefaultInstance() { + return defaultInstance; + } + + public Column getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_fieldAccessorTable; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // repeated bytes qualifier = 2; + public static final int QUALIFIER_FIELD_NUMBER = 2; + private java.util.List qualifier_; + public java.util.List + getQualifierList() { + return qualifier_; + } + public int getQualifierCount() { + return qualifier_.size(); + } + public com.google.protobuf.ByteString getQualifier(int index) { + return qualifier_.get(index); + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + for (int i = 0; i < qualifier_.size(); i++) { + output.writeBytes(2, qualifier_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + { + int dataSize = 0; + for (int i = 0; i < qualifier_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(qualifier_.get(i)); + } + size += dataSize; + size += 1 * getQualifierList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = 
result && getFamily() + .equals(other.getFamily()); + } + result = result && getQualifierList() + .equals(other.getQualifierList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (getQualifierCount() > 0) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifierList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + 
.buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Column_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + qualifier_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((bitField0_ & 0x00000002) == 0x00000002)) { + qualifier_ = 
java.util.Collections.unmodifiableList(qualifier_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.qualifier_ = qualifier_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (!other.qualifier_.isEmpty()) { + if (qualifier_.isEmpty()) { + qualifier_ = other.qualifier_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureQualifierIsMutable(); + qualifier_.addAll(other.qualifier_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + ensureQualifierIsMutable(); + qualifier_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // repeated bytes qualifier = 2; + private java.util.List qualifier_ = java.util.Collections.emptyList();; + private void ensureQualifierIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + qualifier_ = new java.util.ArrayList(qualifier_); + bitField0_ |= 0x00000002; + } + } + public java.util.List + getQualifierList() { + return java.util.Collections.unmodifiableList(qualifier_); + } + public int getQualifierCount() { + return qualifier_.size(); + } + public com.google.protobuf.ByteString getQualifier(int index) { + return qualifier_.get(index); + } + public Builder setQualifier( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierIsMutable(); + qualifier_.set(index, value); + onChanged(); + 
return this; + } + public Builder addQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierIsMutable(); + qualifier_.add(value); + onChanged(); + return this; + } + public Builder addAllQualifier( + java.lang.Iterable values) { + ensureQualifierIsMutable(); + super.addAll(values, qualifier_); + onChanged(); + return this; + } + public Builder clearQualifier() { + qualifier_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Column) + } + + static { + defaultInstance = new Column(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Column) + } + + public interface AttributeOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + + // optional bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class Attribute extends + com.google.protobuf.GeneratedMessage + implements AttributeOrBuilder { + // Use Attribute.newBuilder() to construct. + private Attribute(Builder builder) { + super(builder); + } + private Attribute(boolean noInit) {} + + private static final Attribute defaultInstance; + public static Attribute getDefaultInstance() { + return defaultInstance; + } + + public Attribute getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private void initFields() { + name_ = ""; + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void 
writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute 
parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Attribute_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + 
return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = 
""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // optional bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Attribute) + } + + static { + defaultInstance = new Attribute(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Attribute) + } + + public interface GetOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // repeated .Column column = 2; + java.util.List + getColumnList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index); + int getColumnCount(); + java.util.List + getColumnOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index); + + // repeated .Attribute attribute = 3; + java.util.List + getAttributeList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index); + int getAttributeCount(); + java.util.List + getAttributeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + int index); + + // optional uint64 lockId = 4; + boolean hasLockId(); + long getLockId(); + + // optional .Parameter filter = 5; + boolean hasFilter(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder(); + + // optional .TimeRange timeRange = 6; + boolean hasTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); + + // optional uint32 maxVersions = 7 [default = 1]; + boolean hasMaxVersions(); + int getMaxVersions(); + + // optional bool cacheBlocks = 8 [default = true]; + boolean hasCacheBlocks(); + boolean getCacheBlocks(); + } + public static final class Get extends + com.google.protobuf.GeneratedMessage + implements GetOrBuilder { + // Use Get.newBuilder() to construct. 
+ private Get(Builder builder) { + super(builder); + } + private Get(boolean noInit) {} + + private static final Get defaultInstance; + public static Get getDefaultInstance() { + return defaultInstance; + } + + public Get getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // repeated .Column column = 2; + public static final int COLUMN_FIELD_NUMBER = 2; + private java.util.List column_; + public java.util.List getColumnList() { + return column_; + } + public java.util.List + getColumnOrBuilderList() { + return column_; + } + public int getColumnCount() { + return column_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { + return column_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + return column_.get(index); + } + + // repeated .Attribute attribute = 3; + public static final int ATTRIBUTE_FIELD_NUMBER = 3; + private java.util.List attribute_; + public java.util.List getAttributeList() { + return attribute_; + } + public java.util.List + getAttributeOrBuilderList() { + return attribute_; + } + public int getAttributeCount() { + return attribute_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + return attribute_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + int index) { + return attribute_.get(index); + } + + // optional uint64 lockId = 4; + public static final int LOCKID_FIELD_NUMBER = 4; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getLockId() { + return lockId_; + } + + // optional .Parameter filter = 5; + public static final int FILTER_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { + return filter_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { + return filter_; + } + + // optional .TimeRange timeRange = 6; + public static final int TIMERANGE_FIELD_NUMBER = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + return timeRange_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + return 
timeRange_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + public static final int MAXVERSIONS_FIELD_NUMBER = 7; + private int maxVersions_; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public int getMaxVersions() { + return maxVersions_; + } + + // optional bool cacheBlocks = 8 [default = true]; + public static final int CACHEBLOCKS_FIELD_NUMBER = 8; + private boolean cacheBlocks_; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + column_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + lockId_ = 0L; + filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + maxVersions_ = 1; + cacheBlocks_ = true; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + for (int i = 0; i < column_.size(); i++) { + output.writeMessage(2, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + output.writeMessage(3, attribute_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(4, lockId_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeUInt32(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(8, cacheBlocks_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + for (int i = 0; i < column_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, attribute_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, lockId_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + 
size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(8, cacheBlocks_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && getColumnList() + .equals(other.getColumnList()); + result = result && getAttributeList() + .equals(other.getAttributeList()); + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && (hasTimeRange() == other.hasTimeRange()); + if (hasTimeRange()) { + result = result && getTimeRange() + .equals(other.getTimeRange()); + } + result = result && (hasMaxVersions() == other.hasMaxVersions()); + if (hasMaxVersions()) { + result = result && (getMaxVersions() + == other.getMaxVersions()); + } + result = result && (hasCacheBlocks() == other.hasCacheBlocks()); + if (hasCacheBlocks()) { + result = result && (getCacheBlocks() + == other.getCacheBlocks()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (getColumnCount() > 0) { + hash = (37 * hash) + COLUMN_FIELD_NUMBER; + hash = (53 * hash) + getColumnList().hashCode(); + } + if (getAttributeCount() > 0) { + hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; + hash = (53 * hash) + getAttributeList().hashCode(); + } + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + if (hasTimeRange()) { + hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; + hash = (53 * hash) + getTimeRange().hashCode(); + } + if (hasMaxVersions()) { + hash = (37 * hash) + MAXVERSIONS_FIELD_NUMBER; + hash = (53 * hash) + getMaxVersions(); + } + if (hasCacheBlocks()) { + hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCacheBlocks()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + com.google.protobuf.ByteString data) 
+ throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Get_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnFieldBuilder(); + getAttributeFieldBuilder(); + getFilterFieldBuilder(); + getTimeRangeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + columnBuilder_.clear(); + } + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + attributeBuilder_.clear(); + } + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + maxVersions_ = 1; + bitField0_ = (bitField0_ & ~0x00000040); + cacheBlocks_ = true; + bitField0_ = (bitField0_ & ~0x00000080); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 
0x00000001; + } + result.row_ = row_; + if (columnBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + column_ = java.util.Collections.unmodifiableList(column_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.column_ = column_; + } else { + result.column_ = columnBuilder_.build(); + } + if (attributeBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.attribute_ = attribute_; + } else { + result.attribute_ = attributeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000002; + } + result.lockId_ = lockId_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000004; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000008; + } + if (timeRangeBuilder_ == null) { + result.timeRange_ = timeRange_; + } else { + result.timeRange_ = timeRangeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000010; + } + result.maxVersions_ = maxVersions_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000020; + } + result.cacheBlocks_ = cacheBlocks_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (columnBuilder_ == null) { + if (!other.column_.isEmpty()) { + if (column_.isEmpty()) { + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureColumnIsMutable(); + column_.addAll(other.column_); + } + onChanged(); + } + } else { + if (!other.column_.isEmpty()) { + if (columnBuilder_.isEmpty()) { + columnBuilder_.dispose(); + columnBuilder_ = null; + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000002); + columnBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getColumnFieldBuilder() : null; + } else { + columnBuilder_.addAllMessages(other.column_); + } + } + } + if (attributeBuilder_ == null) { + if (!other.attribute_.isEmpty()) { + if (attribute_.isEmpty()) { + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureAttributeIsMutable(); + attribute_.addAll(other.attribute_); + } + onChanged(); + } + } else { + if (!other.attribute_.isEmpty()) { + if (attributeBuilder_.isEmpty()) { + attributeBuilder_.dispose(); + attributeBuilder_ = null; + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000004); + attributeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getAttributeFieldBuilder() : null; + } else { + attributeBuilder_.addAllMessages(other.attribute_); + } + } + } + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + if (other.hasTimeRange()) { + mergeTimeRange(other.getTimeRange()); + } + if (other.hasMaxVersions()) { + setMaxVersions(other.getMaxVersions()); + } + if (other.hasCacheBlocks()) { + setCacheBlocks(other.getCacheBlocks()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addColumn(subBuilder.buildPartial()); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttribute(subBuilder.buildPartial()); + break; + } + case 32: { + bitField0_ |= 0x00000008; + lockId_ = input.readUInt64(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + if (hasFilter()) { + subBuilder.mergeFrom(getFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setFilter(subBuilder.buildPartial()); + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); + if (hasTimeRange()) { + subBuilder.mergeFrom(getTimeRange()); + } + input.readMessage(subBuilder, extensionRegistry); + setTimeRange(subBuilder.buildPartial()); + break; + } + case 56: { + bitField0_ |= 0x00000040; + maxVersions_ = input.readUInt32(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + cacheBlocks_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString 
getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // repeated .Column column = 2; + private java.util.List column_ = + java.util.Collections.emptyList(); + private void ensureColumnIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + column_ = new java.util.ArrayList(column_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> columnBuilder_; + + public java.util.List getColumnList() { + if (columnBuilder_ == null) { + return java.util.Collections.unmodifiableList(column_); + } else { + return columnBuilder_.getMessageList(); + } + } + public int getColumnCount() { + if (columnBuilder_ == null) { + return column_.size(); + } else { + return columnBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { + if (columnBuilder_ == null) { + return column_.get(index); + } else { + return columnBuilder_.getMessage(index); + } + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.set(index, value); + onChanged(); + } else { + columnBuilder_.setMessage(index, value); + } + return this; + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.set(index, builderForValue.build()); + onChanged(); + } else { + columnBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(value); + onChanged(); + } else { + columnBuilder_.addMessage(value); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(index, value); + onChanged(); + } else { + columnBuilder_.addMessage(index, value); + } + return this; + } + public Builder addColumn( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(builderForValue.build()); + onChanged(); + } else { + columnBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(index, builderForValue.build()); + onChanged(); + } else { + 
columnBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllColumn( + java.lang.Iterable values) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + super.addAll(values, column_); + onChanged(); + } else { + columnBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearColumn() { + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + columnBuilder_.clear(); + } + return this; + } + public Builder removeColumn(int index) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.remove(index); + onChanged(); + } else { + columnBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder getColumnBuilder( + int index) { + return getColumnFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + if (columnBuilder_ == null) { + return column_.get(index); } else { + return columnBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getColumnOrBuilderList() { + if (columnBuilder_ != null) { + return columnBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(column_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder() { + return getColumnFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder( + int index) { + return getColumnFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); + } + public java.util.List + getColumnBuilderList() { + return getColumnFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> + getColumnFieldBuilder() { + if (columnBuilder_ == null) { + columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder>( + column_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + column_ = null; + } + return columnBuilder_; + } + + // repeated .Attribute attribute = 3; + private java.util.List attribute_ = + java.util.Collections.emptyList(); + private void ensureAttributeIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + attribute_ = new java.util.ArrayList(attribute_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> attributeBuilder_; + + public java.util.List getAttributeList() { + if (attributeBuilder_ == null) { + return 
java.util.Collections.unmodifiableList(attribute_); + } else { + return attributeBuilder_.getMessageList(); + } + } + public int getAttributeCount() { + if (attributeBuilder_ == null) { + return attribute_.size(); + } else { + return attributeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); + } else { + return attributeBuilder_.getMessage(index); + } + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.set(index, value); + onChanged(); + } else { + attributeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.set(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(value); + onChanged(); + } else { + attributeBuilder_.addMessage(value); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(index, value); + onChanged(); + } else { + attributeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttribute( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttribute( + java.lang.Iterable values) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + super.addAll(values, attribute_); + onChanged(); + } else { + attributeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttribute() { + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + attributeBuilder_.clear(); + } + return this; + } + public Builder removeAttribute(int index) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.remove(index); + onChanged(); + } else { + attributeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder getAttributeBuilder( + int 
index) { + return getAttributeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); } else { + return attributeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributeOrBuilderList() { + if (attributeBuilder_ != null) { + return attributeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attribute_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder() { + return getAttributeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder( + int index) { + return getAttributeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + } + public java.util.List + getAttributeBuilderList() { + return getAttributeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> + getAttributeFieldBuilder() { + if (attributeBuilder_ == null) { + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder>( + attribute_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + attribute_ = null; + } + return attributeBuilder_; + } + + // optional uint64 lockId = 4; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000008; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000008); + lockId_ = 0L; + onChanged(); + return this; + } + + // optional .Parameter filter = 5; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> filterBuilder_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + 
filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setFilter( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + filter_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) { + filter_ = + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getFilterBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // optional .TimeRange timeRange = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + if (timeRangeBuilder_ == null) { + return timeRange_; + } else { + return timeRangeBuilder_.getMessage(); + } + } + public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + timeRange_ = value; + onChanged(); + } else { + 
timeRangeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder setTimeRange( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { + if (timeRangeBuilder_ == null) { + timeRange_ = builderForValue.build(); + onChanged(); + } else { + timeRangeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020) && + timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { + timeRange_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); + } else { + timeRange_ = value; + } + onChanged(); + } else { + timeRangeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder clearTimeRange() { + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + onChanged(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getTimeRangeFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + if (timeRangeBuilder_ != null) { + return timeRangeBuilder_.getMessageOrBuilder(); + } else { + return timeRange_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> + getTimeRangeFieldBuilder() { + if (timeRangeBuilder_ == null) { + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( + timeRange_, + getParentForChildren(), + isClean()); + timeRange_ = null; + } + return timeRangeBuilder_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + private int maxVersions_ = 1; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getMaxVersions() { + return maxVersions_; + } + public Builder setMaxVersions(int value) { + bitField0_ |= 0x00000040; + maxVersions_ = value; + onChanged(); + return this; + } + public Builder clearMaxVersions() { + bitField0_ = (bitField0_ & ~0x00000040); + maxVersions_ = 1; + onChanged(); + return this; + } + + // optional bool cacheBlocks = 8 [default = true]; + private boolean cacheBlocks_ = true; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + public Builder setCacheBlocks(boolean value) { + bitField0_ |= 0x00000080; + cacheBlocks_ = value; + onChanged(); + return this; + } + public Builder clearCacheBlocks() { + bitField0_ = (bitField0_ & ~0x00000080); + cacheBlocks_ = true; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Get) + } + + 
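For reference, a minimal usage sketch of the generated Get API defined above: it builds a Get with the generated builder, serializes it, and parses it back with the generated parseFrom(). This is an illustration only, not part of the generated sources or of this patch; it uses only methods visible in the generated code (newBuilder(), setRow(), setMaxVersions(), setCacheBlocks(), build(), parseFrom()) plus the standard protobuf toByteArray().

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos;

    public class GetProtoExample {
      public static void main(String[] args) throws Exception {
        // Build a Get message with the generated builder.
        RegionClientProtos.Get get = RegionClientProtos.Get.newBuilder()
            .setRow(ByteString.copyFromUtf8("row-0001")) // required bytes row = 1
            .setMaxVersions(3)                           // optional uint32 maxVersions = 7 [default = 1]
            .setCacheBlocks(false)                       // optional bool cacheBlocks = 8 [default = true]
            .build();

        // Round-trip through the wire format using the generated parser.
        byte[] wire = get.toByteArray();
        RegionClientProtos.Get parsed = RegionClientProtos.Get.parseFrom(wire);
        System.out.println("maxVersions = " + parsed.getMaxVersions());
      }
    }

The same newBuilder()/build()/parseFrom() pattern applies to the Result and GetRequest messages that follow.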
static { + defaultInstance = new Get(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Get) + } + + public interface ResultOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .KeyValue value = 1; + java.util.List + getValueList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getValue(int index); + int getValueCount(); + java.util.List + getValueOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getValueOrBuilder( + int index); + } + public static final class Result extends + com.google.protobuf.GeneratedMessage + implements ResultOrBuilder { + // Use Result.newBuilder() to construct. + private Result(Builder builder) { + super(builder); + } + private Result(boolean noInit) {} + + private static final Result defaultInstance; + public static Result getDefaultInstance() { + return defaultInstance; + } + + public Result getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_fieldAccessorTable; + } + + // repeated .KeyValue value = 1; + public static final int VALUE_FIELD_NUMBER = 1; + private java.util.List value_; + public java.util.List getValueList() { + return value_; + } + public java.util.List + getValueOrBuilderList() { + return value_; + } + public int getValueCount() { + return value_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getValue(int index) { + return value_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getValueOrBuilder( + int index) { + return value_.get(index); + } + + private void initFields() { + value_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getValueCount(); i++) { + if (!getValue(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < value_.size(); i++) { + output.writeMessage(1, value_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < value_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, value_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result)) { + return 
super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result) obj; + + boolean result = true; + result = result && getValueList() + .equals(other.getValueList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getValueCount() > 0) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValueList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, 
extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Result_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getValueFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (valueBuilder_ == null) { + value_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + valueBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result(this); + int from_bitField0_ = bitField0_; + if (valueBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + value_ = java.util.Collections.unmodifiableList(value_); + bitField0_ = (bitField0_ & ~0x00000001); + } + 
result.value_ = value_; + } else { + result.value_ = valueBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()) return this; + if (valueBuilder_ == null) { + if (!other.value_.isEmpty()) { + if (value_.isEmpty()) { + value_ = other.value_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureValueIsMutable(); + value_.addAll(other.value_); + } + onChanged(); + } + } else { + if (!other.value_.isEmpty()) { + if (valueBuilder_.isEmpty()) { + valueBuilder_.dispose(); + valueBuilder_ = null; + value_ = other.value_; + bitField0_ = (bitField0_ & ~0x00000001); + valueBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getValueFieldBuilder() : null; + } else { + valueBuilder_.addAllMessages(other.value_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getValueCount(); i++) { + if (!getValue(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addValue(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .KeyValue value = 1; + private java.util.List value_ = + java.util.Collections.emptyList(); + private void ensureValueIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + value_ = new java.util.ArrayList(value_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> valueBuilder_; + + public java.util.List getValueList() { + if (valueBuilder_ == null) { + return java.util.Collections.unmodifiableList(value_); + } else { + return valueBuilder_.getMessageList(); + } + } + public int getValueCount() { + if (valueBuilder_ == null) { + return value_.size(); + } else { + return valueBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue getValue(int index) { + if (valueBuilder_ == null) { + return value_.get(index); + } else 
{ + return valueBuilder_.getMessage(index); + } + } + public Builder setValue( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureValueIsMutable(); + value_.set(index, value); + onChanged(); + } else { + valueBuilder_.setMessage(index, value); + } + return this; + } + public Builder setValue( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { + if (valueBuilder_ == null) { + ensureValueIsMutable(); + value_.set(index, builderForValue.build()); + onChanged(); + } else { + valueBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureValueIsMutable(); + value_.add(value); + onChanged(); + } else { + valueBuilder_.addMessage(value); + } + return this; + } + public Builder addValue( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureValueIsMutable(); + value_.add(index, value); + onChanged(); + } else { + valueBuilder_.addMessage(index, value); + } + return this; + } + public Builder addValue( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { + if (valueBuilder_ == null) { + ensureValueIsMutable(); + value_.add(builderForValue.build()); + onChanged(); + } else { + valueBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addValue( + int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder builderForValue) { + if (valueBuilder_ == null) { + ensureValueIsMutable(); + value_.add(index, builderForValue.build()); + onChanged(); + } else { + valueBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllValue( + java.lang.Iterable values) { + if (valueBuilder_ == null) { + ensureValueIsMutable(); + super.addAll(values, value_); + onChanged(); + } else { + valueBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + valueBuilder_.clear(); + } + return this; + } + public Builder removeValue(int index) { + if (valueBuilder_ == null) { + ensureValueIsMutable(); + value_.remove(index); + onChanged(); + } else { + valueBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder getValueBuilder( + int index) { + return getValueFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder getValueOrBuilder( + int index) { + if (valueBuilder_ == null) { + return value_.get(index); } else { + return valueBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getValueOrBuilderList() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(value_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addValueBuilder() { + return getValueFieldBuilder().addBuilder( + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder addValueBuilder( + int index) { + return getValueFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.getDefaultInstance()); + } + public java.util.List + getValueBuilderList() { + return getValueFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValue.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.KeyValueOrBuilder>( + value_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + value_ = null; + } + return valueBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Result) + } + + static { + defaultInstance = new Result(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Result) + } + + public interface GetRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .Get get = 2; + boolean hasGet(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getGet(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder getGetOrBuilder(); + + // optional bool closestRowBefore = 3; + boolean hasClosestRowBefore(); + boolean getClosestRowBefore(); + + // optional bool existenceOnly = 4; + boolean hasExistenceOnly(); + boolean getExistenceOnly(); + } + public static final class GetRequest extends + com.google.protobuf.GeneratedMessage + implements GetRequestOrBuilder { + // Use GetRequest.newBuilder() to construct. 
+ private GetRequest(Builder builder) { + super(builder); + } + private GetRequest(boolean noInit) {} + + private static final GetRequest defaultInstance; + public static GetRequest getDefaultInstance() { + return defaultInstance; + } + + public GetRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .Get get = 2; + public static final int GET_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get get_; + public boolean hasGet() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getGet() { + return get_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder getGetOrBuilder() { + return get_; + } + + // optional bool closestRowBefore = 3; + public static final int CLOSESTROWBEFORE_FIELD_NUMBER = 3; + private boolean closestRowBefore_; + public boolean hasClosestRowBefore() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getClosestRowBefore() { + return closestRowBefore_; + } + + // optional bool existenceOnly = 4; + public static final int EXISTENCEONLY_FIELD_NUMBER = 4; + private boolean existenceOnly_; + public boolean hasExistenceOnly() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getExistenceOnly() { + return existenceOnly_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); + closestRowBefore_ = false; + existenceOnly_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasGet()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getGet().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, get_); + } + if (((bitField0_ & 0x00000004) == 
0x00000004)) { + output.writeBool(3, closestRowBefore_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBool(4, existenceOnly_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, get_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, closestRowBefore_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, existenceOnly_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasGet() == other.hasGet()); + if (hasGet()) { + result = result && getGet() + .equals(other.getGet()); + } + result = result && (hasClosestRowBefore() == other.hasClosestRowBefore()); + if (hasClosestRowBefore()) { + result = result && (getClosestRowBefore() + == other.getClosestRowBefore()); + } + result = result && (hasExistenceOnly() == other.hasExistenceOnly()); + if (hasExistenceOnly()) { + result = result && (getExistenceOnly() + == other.getExistenceOnly()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasGet()) { + hash = (37 * hash) + GET_FIELD_NUMBER; + hash = (53 * hash) + getGet().hashCode(); + } + if (hasClosestRowBefore()) { + hash = (37 * hash) + CLOSESTROWBEFORE_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getClosestRowBefore()); + } + if (hasExistenceOnly()) { + hash = (37 * hash) + EXISTENCEONLY_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getExistenceOnly()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getGetFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (getBuilder_ == null) { + get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); + } else { + getBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + closestRowBefore_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + existenceOnly_ = false; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (getBuilder_ == null) { + result.get_ = get_; + } else { + result.get_ = getBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.closestRowBefore_ = closestRowBefore_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.existenceOnly_ = existenceOnly_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder 
mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasGet()) { + mergeGet(other.getGet()); + } + if (other.hasClosestRowBefore()) { + setClosestRowBefore(other.getClosestRowBefore()); + } + if (other.hasExistenceOnly()) { + setExistenceOnly(other.getExistenceOnly()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasGet()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (!getGet().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.newBuilder(); + if (hasGet()) { + subBuilder.mergeFrom(getGet()); + } + input.readMessage(subBuilder, extensionRegistry); + setGet(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; + closestRowBefore_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + existenceOnly_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); 
+ } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .Get get = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder> getBuilder_; + public boolean hasGet() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get getGet() { + if (getBuilder_ == null) { + return get_; + } else { + return getBuilder_.getMessage(); + } + } + public Builder 
setGet(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get value) { + if (getBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + get_ = value; + onChanged(); + } else { + getBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setGet( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder builderForValue) { + if (getBuilder_ == null) { + get_ = builderForValue.build(); + onChanged(); + } else { + getBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeGet(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get value) { + if (getBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + get_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance()) { + get_ = + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.newBuilder(get_).mergeFrom(value).buildPartial(); + } else { + get_ = value; + } + onChanged(); + } else { + getBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearGet() { + if (getBuilder_ == null) { + get_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.getDefaultInstance(); + onChanged(); + } else { + getBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder getGetBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getGetFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder getGetOrBuilder() { + if (getBuilder_ != null) { + return getBuilder_.getMessageOrBuilder(); + } else { + return get_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder> + getGetFieldBuilder() { + if (getBuilder_ == null) { + getBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetOrBuilder>( + get_, + getParentForChildren(), + isClean()); + get_ = null; + } + return getBuilder_; + } + + // optional bool closestRowBefore = 3; + private boolean closestRowBefore_ ; + public boolean hasClosestRowBefore() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getClosestRowBefore() { + return closestRowBefore_; + } + public Builder setClosestRowBefore(boolean value) { + bitField0_ |= 0x00000004; + closestRowBefore_ = value; + onChanged(); + return this; + } + public Builder clearClosestRowBefore() { + bitField0_ = (bitField0_ & ~0x00000004); + closestRowBefore_ = false; + onChanged(); + return this; + } + + // optional bool existenceOnly = 4; + private boolean existenceOnly_ ; + public boolean hasExistenceOnly() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getExistenceOnly() { + return existenceOnly_; + } + public Builder setExistenceOnly(boolean value) { + bitField0_ |= 0x00000008; + existenceOnly_ = value; + onChanged(); + return this; + } + public Builder clearExistenceOnly() { + bitField0_ = (bitField0_ & ~0x00000008); + existenceOnly_ = 
false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetRequest) + } + + static { + defaultInstance = new GetRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetRequest) + } + + public interface GetResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .Result result = 1; + boolean hasResult(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder(); + + // optional bool exists = 2; + boolean hasExists(); + boolean getExists(); + } + public static final class GetResponse extends + com.google.protobuf.GeneratedMessage + implements GetResponseOrBuilder { + // Use GetResponse.newBuilder() to construct. + private GetResponse(Builder builder) { + super(builder); + } + private GetResponse(boolean noInit) {} + + private static final GetResponse defaultInstance; + public static GetResponse getDefaultInstance() { + return defaultInstance; + } + + public GetResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_fieldAccessorTable; + } + + private int bitField0_; + // optional .Result result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { + return result_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { + return result_; + } + + // optional bool exists = 2; + public static final int EXISTS_FIELD_NUMBER = 2; + private boolean exists_; + public boolean hasExists() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getExists() { + return exists_; + } + + private void initFields() { + result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + exists_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasResult()) { + if (!getResult().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, exists_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_); + } + if (((bitField0_ & 
0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, exists_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse) obj; + + boolean result = true; + result = result && (hasResult() == other.hasResult()); + if (hasResult()) { + result = result && getResult() + .equals(other.getResult()); + } + result = result && (hasExists() == other.hasExists()); + if (hasExists()) { + result = result && (getExists() + == other.getExists()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResult()) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResult().hashCode(); + } + if (hasExists()) { + hash = (37 * hash) + EXISTS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getExists()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if 
(builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_GetResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + exists_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(); + } + + 
public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (resultBuilder_ == null) { + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.exists_ = exists_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance()) return this; + if (other.hasResult()) { + mergeResult(other.getResult()); + } + if (other.hasExists()) { + setExists(other.getExists()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasResult()) { + if (!getResult().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(); + if (hasResult()) { + subBuilder.mergeFrom(getResult()); + } + input.readMessage(subBuilder, extensionRegistry); + setResult(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + exists_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional .Result result = 1; + private 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> resultBuilder_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { + if (resultBuilder_ == null) { + return result_; + } else { + return resultBuilder_.getMessage(); + } + } + public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + result_ = value; + onChanged(); + } else { + resultBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setResult( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + result_ = builderForValue.build(); + onChanged(); + } else { + resultBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + result_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()) { + result_ = + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); + } else { + result_ = value; + } + onChanged(); + } else { + resultBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + onChanged(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder getResultBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getResultFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilder(); + } else { + return result_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder>( + result_, + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // optional bool exists = 2; + private boolean exists_ ; + public boolean hasExists() { + return ((bitField0_ & 
0x00000002) == 0x00000002); + } + public boolean getExists() { + return exists_; + } + public Builder setExists(boolean value) { + bitField0_ |= 0x00000002; + exists_ = value; + onChanged(); + return this; + } + public Builder clearExists() { + bitField0_ = (bitField0_ & ~0x00000002); + exists_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:GetResponse) + } + + static { + defaultInstance = new GetResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:GetResponse) + } + + public interface ConditionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required bytes family = 2; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required bytes qualifier = 3; + boolean hasQualifier(); + com.google.protobuf.ByteString getQualifier(); + + // required .Condition.CompareType compareType = 4; + boolean hasCompareType(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType getCompareType(); + + // required .Condition.Comparator comparator = 5; + boolean hasComparator(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator getComparator(); + + // optional bytes value = 6; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + } + public static final class Condition extends + com.google.protobuf.GeneratedMessage + implements ConditionOrBuilder { + // Use Condition.newBuilder() to construct. + private Condition(Builder builder) { + super(builder); + } + private Condition(boolean noInit) {} + + private static final Condition defaultInstance; + public static Condition getDefaultInstance() { + return defaultInstance; + } + + public Condition getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_fieldAccessorTable; + } + + public enum CompareType + implements com.google.protobuf.ProtocolMessageEnum { + LESS(0, 0), + LESS_OR_EQUAL(1, 1), + EQUAL(2, 2), + NOT_EQUAL(3, 3), + GREATER_OR_EQUAL(4, 4), + GREATER(5, 5), + NO_OP(6, 6), + ; + + public static final int LESS_VALUE = 0; + public static final int LESS_OR_EQUAL_VALUE = 1; + public static final int EQUAL_VALUE = 2; + public static final int NOT_EQUAL_VALUE = 3; + public static final int GREATER_OR_EQUAL_VALUE = 4; + public static final int GREATER_VALUE = 5; + public static final int NO_OP_VALUE = 6; + + + public final int getNumber() { return value; } + + public static CompareType valueOf(int value) { + switch (value) { + case 0: return LESS; + case 1: return LESS_OR_EQUAL; + case 2: return EQUAL; + case 3: return NOT_EQUAL; + case 4: return GREATER_OR_EQUAL; + case 5: return GREATER; + case 6: return NO_OP; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public CompareType findValueByNumber(int number) { + return 
CompareType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDescriptor().getEnumTypes().get(0); + } + + private static final CompareType[] VALUES = { + LESS, LESS_OR_EQUAL, EQUAL, NOT_EQUAL, GREATER_OR_EQUAL, GREATER, NO_OP, + }; + + public static CompareType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private CompareType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Condition.CompareType) + } + + public enum Comparator + implements com.google.protobuf.ProtocolMessageEnum { + BINARY_COMPARATOR(0, 0), + BINARY_PREFIX_COMPARATOR(1, 1), + BIT_AND_COMPARATOR(2, 2), + BIT_OR_COMPARATOR(3, 3), + BIT_XOR_COMPARATOR(4, 4), + NULL_COMPARATOR(5, 5), + REGEX_STRING_COMPARATOR(6, 6), + SUBSTRING_COMPARATOR(7, 7), + ; + + public static final int BINARY_COMPARATOR_VALUE = 0; + public static final int BINARY_PREFIX_COMPARATOR_VALUE = 1; + public static final int BIT_AND_COMPARATOR_VALUE = 2; + public static final int BIT_OR_COMPARATOR_VALUE = 3; + public static final int BIT_XOR_COMPARATOR_VALUE = 4; + public static final int NULL_COMPARATOR_VALUE = 5; + public static final int REGEX_STRING_COMPARATOR_VALUE = 6; + public static final int SUBSTRING_COMPARATOR_VALUE = 7; + + + public final int getNumber() { return value; } + + public static Comparator valueOf(int value) { + switch (value) { + case 0: return BINARY_COMPARATOR; + case 1: return BINARY_PREFIX_COMPARATOR; + case 2: return BIT_AND_COMPARATOR; + case 3: return BIT_OR_COMPARATOR; + case 4: return BIT_XOR_COMPARATOR; + case 5: return NULL_COMPARATOR; + case 6: return REGEX_STRING_COMPARATOR; + case 7: return SUBSTRING_COMPARATOR; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public Comparator findValueByNumber(int number) { + return Comparator.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDescriptor().getEnumTypes().get(1); + } + + private static final Comparator[] VALUES = { + BINARY_COMPARATOR, BINARY_PREFIX_COMPARATOR, BIT_AND_COMPARATOR, BIT_OR_COMPARATOR, BIT_XOR_COMPARATOR, NULL_COMPARATOR, REGEX_STRING_COMPARATOR, SUBSTRING_COMPARATOR, + }; + + public static Comparator valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() 
!= getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private Comparator(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Condition.Comparator) + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required bytes family = 2; + public static final int FAMILY_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required bytes qualifier = 3; + public static final int QUALIFIER_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString qualifier_; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + // required .Condition.CompareType compareType = 4; + public static final int COMPARETYPE_FIELD_NUMBER = 4; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType compareType_; + public boolean hasCompareType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType getCompareType() { + return compareType_; + } + + // required .Condition.Comparator comparator = 5; + public static final int COMPARATOR_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator comparator_; + public boolean hasComparator() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator getComparator() { + return comparator_; + } + + // optional bytes value = 6; + public static final int VALUE_FIELD_NUMBER = 6; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + family_ = com.google.protobuf.ByteString.EMPTY; + qualifier_ = com.google.protobuf.ByteString.EMPTY; + compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; + comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; + value_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasQualifier()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCompareType()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasComparator()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public 
void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, qualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeEnum(4, compareType_.getNumber()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeEnum(5, comparator_.getNumber()); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBytes(6, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, family_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, qualifier_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(4, compareType_.getNumber()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(5, comparator_.getNumber()); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(6, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && (hasCompareType() == other.hasCompareType()); + if (hasCompareType()) { + result = result && + (getCompareType() == other.getCompareType()); + } + result = result && (hasComparator() == other.hasComparator()); + if (hasComparator()) { + result = result && + (getComparator() == other.getComparator()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + 
getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + if (hasCompareType()) { + hash = (37 * hash) + COMPARETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getCompareType()); + } + if (hasComparator()) { + hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getComparator()); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition parseFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Condition_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; + bitField0_ = (bitField0_ & ~0x00000008); + comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; + bitField0_ = (bitField0_ & ~0x00000010); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.qualifier_ = qualifier_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.compareType_ = compareType_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.comparator_ = comparator_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000020; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + if (other.hasCompareType()) { + setCompareType(other.getCompareType()); + } + if (other.hasComparator()) { + setComparator(other.getComparator()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasFamily()) { + + return false; + } + if (!hasQualifier()) { + + return false; + } + if (!hasCompareType()) { + + return false; + } + if (!hasComparator()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + family_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + 
qualifier_ = input.readBytes(); + break; + } + case 32: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType value = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(4, rawValue); + } else { + bitField0_ |= 0x00000008; + compareType_ = value; + } + break; + } + case 40: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator value = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(5, rawValue); + } else { + bitField0_ |= 0x00000010; + comparator_ = value; + } + break; + } + case 50: { + bitField0_ |= 0x00000020; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required bytes family = 2; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000002); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required bytes qualifier = 3; + private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + qualifier_ = value; + onChanged(); + return this; + } + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000004); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // required .Condition.CompareType compareType = 4; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; + public boolean hasCompareType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType getCompareType() { + return compareType_; + } + public Builder setCompareType(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType value) { + if (value == null) { + 
throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + compareType_ = value; + onChanged(); + return this; + } + public Builder clearCompareType() { + bitField0_ = (bitField0_ & ~0x00000008); + compareType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.CompareType.LESS; + onChanged(); + return this; + } + + // required .Condition.Comparator comparator = 5; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; + public boolean hasComparator() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator getComparator() { + return comparator_; + } + public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + comparator_ = value; + onChanged(); + return this; + } + public Builder clearComparator() { + bitField0_ = (bitField0_ & ~0x00000010); + comparator_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Comparator.BINARY_COMPARATOR; + onChanged(); + return this; + } + + // optional bytes value = 6; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000020); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Condition) + } + + static { + defaultInstance = new Condition(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Condition) + } + + public interface MutateOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required .Mutate.MutateType mutateType = 2; + boolean hasMutateType(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType getMutateType(); + + // repeated .Mutate.ColumnValue columnValue = 3; + java.util.List + getColumnValueList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getColumnValue(int index); + int getColumnValueCount(); + java.util.List + getColumnValueOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + int index); + + // repeated .Attribute attribute = 4; + java.util.List + getAttributeList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index); + int getAttributeCount(); + java.util.List + getAttributeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + int index); + + // optional uint64 timestamp = 5; + boolean hasTimestamp(); + long getTimestamp(); + + // optional uint64 lockId = 6; + boolean hasLockId(); + long getLockId(); + + // optional bool writeToWAL = 7 [default = 
true]; + boolean hasWriteToWAL(); + boolean getWriteToWAL(); + + // optional .TimeRange timeRange = 10; + boolean hasTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); + } + public static final class Mutate extends + com.google.protobuf.GeneratedMessage + implements MutateOrBuilder { + // Use Mutate.newBuilder() to construct. + private Mutate(Builder builder) { + super(builder); + } + private Mutate(boolean noInit) {} + + private static final Mutate defaultInstance; + public static Mutate getDefaultInstance() { + return defaultInstance; + } + + public Mutate getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_fieldAccessorTable; + } + + public enum MutateType + implements com.google.protobuf.ProtocolMessageEnum { + APPEND(0, 0), + INCREMENT(1, 1), + PUT(2, 2), + DELETE(3, 3), + DELETE_COLUMN(4, 4), + DELETE_FAMILY(5, 5), + ; + + public static final int APPEND_VALUE = 0; + public static final int INCREMENT_VALUE = 1; + public static final int PUT_VALUE = 2; + public static final int DELETE_VALUE = 3; + public static final int DELETE_COLUMN_VALUE = 4; + public static final int DELETE_FAMILY_VALUE = 5; + + + public final int getNumber() { return value; } + + public static MutateType valueOf(int value) { + switch (value) { + case 0: return APPEND; + case 1: return INCREMENT; + case 2: return PUT; + case 3: return DELETE; + case 4: return DELETE_COLUMN; + case 5: return DELETE_FAMILY; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public MutateType findValueByNumber(int number) { + return MutateType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDescriptor().getEnumTypes().get(0); + } + + private static final MutateType[] VALUES = { + APPEND, INCREMENT, PUT, DELETE, DELETE_COLUMN, DELETE_FAMILY, + }; + + public static MutateType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private MutateType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:Mutate.MutateType) + } + + public interface ColumnValueOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + 
boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; + java.util.List + getQualifierValueList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index); + int getQualifierValueCount(); + java.util.List + getQualifierValueOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + int index); + + // optional uint64 timestamp = 3; + boolean hasTimestamp(); + long getTimestamp(); + } + public static final class ColumnValue extends + com.google.protobuf.GeneratedMessage + implements ColumnValueOrBuilder { + // Use ColumnValue.newBuilder() to construct. + private ColumnValue(Builder builder) { + super(builder); + } + private ColumnValue(boolean noInit) {} + + private static final ColumnValue defaultInstance; + public static ColumnValue getDefaultInstance() { + return defaultInstance; + } + + public ColumnValue getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; + } + + public interface QualifierValueOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes qualifier = 1; + boolean hasQualifier(); + com.google.protobuf.ByteString getQualifier(); + + // optional bytes value = 2; + boolean hasValue(); + com.google.protobuf.ByteString getValue(); + + // optional uint64 timestamp = 3; + boolean hasTimestamp(); + long getTimestamp(); + } + public static final class QualifierValue extends + com.google.protobuf.GeneratedMessage + implements QualifierValueOrBuilder { + // Use QualifierValue.newBuilder() to construct. 
+ private QualifierValue(Builder builder) { + super(builder); + } + private QualifierValue(boolean noInit) {} + + private static final QualifierValue defaultInstance; + public static QualifierValue getDefaultInstance() { + return defaultInstance; + } + + public QualifierValue getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + } + + private int bitField0_; + // required bytes qualifier = 1; + public static final int QUALIFIER_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString qualifier_; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + + // optional bytes value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + + // optional uint64 timestamp = 3; + public static final int TIMESTAMP_FIELD_NUMBER = 3; + private long timestamp_; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getTimestamp() { + return timestamp_; + } + + private void initFields() { + qualifier_ = com.google.protobuf.ByteString.EMPTY; + value_ = com.google.protobuf.ByteString.EMPTY; + timestamp_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasQualifier()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, qualifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, value_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, timestamp_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, qualifier_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, value_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, timestamp_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final 
java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue) obj; + + boolean result = true; + result = result && (hasQualifier() == other.hasQualifier()); + if (hasQualifier()) { + result = result && getQualifier() + .equals(other.getQualifier()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasQualifier()) { + hash = (37 * hash) + QUALIFIER_FIELD_NUMBER; + hash = (53 * hash) + getQualifier().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + if (hasTimestamp()) { + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTimestamp()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom(java.io.InputStream input) + throws 
java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + qualifier_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDescriptor(); + } + + public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.qualifier_ = qualifier_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.timestamp_ = timestamp_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()) return this; + if (other.hasQualifier()) { + setQualifier(other.getQualifier()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasQualifier()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return 
this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + qualifier_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + timestamp_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes qualifier = 1; + private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasQualifier() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getQualifier() { + return qualifier_; + } + public Builder setQualifier(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + qualifier_ = value; + onChanged(); + return this; + } + public Builder clearQualifier() { + bitField0_ = (bitField0_ & ~0x00000001); + qualifier_ = getDefaultInstance().getQualifier(); + onChanged(); + return this; + } + + // optional bytes value = 2; + private com.google.protobuf.ByteString value_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getValue() { + return value_; + } + public Builder setValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + + // optional uint64 timestamp = 3; + private long timestamp_ ; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getTimestamp() { + return timestamp_; + } + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000004; + timestamp_ = value; + onChanged(); + return this; + } + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000004); + timestamp_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue.QualifierValue) + } + + static { + defaultInstance = new QualifierValue(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Mutate.ColumnValue.QualifierValue) + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; + public static final int QUALIFIERVALUE_FIELD_NUMBER = 2; + private java.util.List qualifierValue_; + public java.util.List getQualifierValueList() { + return qualifierValue_; + } + public java.util.List + getQualifierValueOrBuilderList() { + return qualifierValue_; + } + public int getQualifierValueCount() { + return qualifierValue_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { + return qualifierValue_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + int index) { + return qualifierValue_.get(index); + } + + // optional uint64 timestamp = 3; + public 
static final int TIMESTAMP_FIELD_NUMBER = 3; + private long timestamp_; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getTimestamp() { + return timestamp_; + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + qualifierValue_ = java.util.Collections.emptyList(); + timestamp_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getQualifierValueCount(); i++) { + if (!getQualifierValue(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + for (int i = 0; i < qualifierValue_.size(); i++) { + output.writeMessage(2, qualifierValue_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(3, timestamp_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + for (int i = 0; i < qualifierValue_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, qualifierValue_.get(i)); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, timestamp_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && getQualifierValueList() + .equals(other.getQualifierValueList()); + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (getQualifierValueCount() > 0) { + hash = (37 * hash) + QUALIFIERVALUE_FIELD_NUMBER; + hash = (53 * hash) + getQualifierValueList().hashCode(); + } + if (hasTimestamp()) { + hash 
= (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTimestamp()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + 
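+      // ------------------------------------------------------------------
+      // Illustrative sketch (not part of the protoc output): how client
+      // code might assemble a Put-style Mutate from the generated builders
+      // in this file.  The row/family/qualifier literals are placeholders,
+      // and the Mutate.Builder setters used below (setRow, setMutateType,
+      // addColumnValue) do not appear in this excerpt -- they are assumed
+      // from the standard protoc builder pattern.  Kept inside a comment so
+      // the generated class itself is unchanged.
+      //
+      //   Mutate put = Mutate.newBuilder()
+      //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row1"))
+      //       .setMutateType(Mutate.MutateType.PUT)
+      //       .addColumnValue(ColumnValue.newBuilder()
+      //           .setFamily(com.google.protobuf.ByteString.copyFromUtf8("cf"))
+      //           .addQualifierValue(QualifierValue.newBuilder()
+      //               .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("q1"))
+      //               .setValue(com.google.protobuf.ByteString.copyFromUtf8("v1"))
+      //               .build())
+      //           .build())
+      //       .build();
+      // ------------------------------------------------------------------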
@java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_ColumnValue_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getQualifierValueFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (qualifierValueBuilder_ == null) { + qualifierValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + qualifierValueBuilder_.clear(); + } + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (qualifierValueBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + 
qualifierValue_ = java.util.Collections.unmodifiableList(qualifierValue_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.qualifierValue_ = qualifierValue_; + } else { + result.qualifierValue_ = qualifierValueBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + result.timestamp_ = timestamp_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (qualifierValueBuilder_ == null) { + if (!other.qualifierValue_.isEmpty()) { + if (qualifierValue_.isEmpty()) { + qualifierValue_ = other.qualifierValue_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureQualifierValueIsMutable(); + qualifierValue_.addAll(other.qualifierValue_); + } + onChanged(); + } + } else { + if (!other.qualifierValue_.isEmpty()) { + if (qualifierValueBuilder_.isEmpty()) { + qualifierValueBuilder_.dispose(); + qualifierValueBuilder_ = null; + qualifierValue_ = other.qualifierValue_; + bitField0_ = (bitField0_ & ~0x00000002); + qualifierValueBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getQualifierValueFieldBuilder() : null; + } else { + qualifierValueBuilder_.addAllMessages(other.qualifierValue_); + } + } + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + for (int i = 0; i < getQualifierValueCount(); i++) { + if (!getQualifierValue(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addQualifierValue(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; + timestamp_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString 
family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // repeated .Mutate.ColumnValue.QualifierValue qualifierValue = 2; + private java.util.List qualifierValue_ = + java.util.Collections.emptyList(); + private void ensureQualifierValueIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + qualifierValue_ = new java.util.ArrayList(qualifierValue_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> qualifierValueBuilder_; + + public java.util.List getQualifierValueList() { + if (qualifierValueBuilder_ == null) { + return java.util.Collections.unmodifiableList(qualifierValue_); + } else { + return qualifierValueBuilder_.getMessageList(); + } + } + public int getQualifierValueCount() { + if (qualifierValueBuilder_ == null) { + return qualifierValue_.size(); + } else { + return qualifierValueBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue getQualifierValue(int index) { + if (qualifierValueBuilder_ == null) { + return qualifierValue_.get(index); + } else { + return qualifierValueBuilder_.getMessage(index); + } + } + public Builder setQualifierValue( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue value) { + if (qualifierValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierValueIsMutable(); + qualifierValue_.set(index, value); + onChanged(); + } else { + qualifierValueBuilder_.setMessage(index, value); + } + return this; + } + public Builder setQualifierValue( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.set(index, builderForValue.build()); + onChanged(); + } else { + qualifierValueBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addQualifierValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue value) { + if (qualifierValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierValueIsMutable(); + qualifierValue_.add(value); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(value); + } + return this; + } + public Builder addQualifierValue( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue value) { + if (qualifierValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureQualifierValueIsMutable(); + 
qualifierValue_.add(index, value); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(index, value); + } + return this; + } + public Builder addQualifierValue( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.add(builderForValue.build()); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addQualifierValue( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder builderForValue) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.add(index, builderForValue.build()); + onChanged(); + } else { + qualifierValueBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllQualifierValue( + java.lang.Iterable values) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + super.addAll(values, qualifierValue_); + onChanged(); + } else { + qualifierValueBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearQualifierValue() { + if (qualifierValueBuilder_ == null) { + qualifierValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + qualifierValueBuilder_.clear(); + } + return this; + } + public Builder removeQualifierValue(int index) { + if (qualifierValueBuilder_ == null) { + ensureQualifierValueIsMutable(); + qualifierValue_.remove(index); + onChanged(); + } else { + qualifierValueBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder getQualifierValueBuilder( + int index) { + return getQualifierValueFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder getQualifierValueOrBuilder( + int index) { + if (qualifierValueBuilder_ == null) { + return qualifierValue_.get(index); } else { + return qualifierValueBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getQualifierValueOrBuilderList() { + if (qualifierValueBuilder_ != null) { + return qualifierValueBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(qualifierValue_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder() { + return getQualifierValueFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder addQualifierValueBuilder( + int index) { + return getQualifierValueFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.getDefaultInstance()); + } + public java.util.List + getQualifierValueBuilderList() { + return getQualifierValueFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder> + getQualifierValueFieldBuilder() { + if (qualifierValueBuilder_ == null) { + qualifierValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValueOrBuilder>( + qualifierValue_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + qualifierValue_ = null; + } + return qualifierValueBuilder_; + } + + // optional uint64 timestamp = 3; + private long timestamp_ ; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getTimestamp() { + return timestamp_; + } + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000004; + timestamp_ = value; + onChanged(); + return this; + } + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000004); + timestamp_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Mutate.ColumnValue) + } + + static { + defaultInstance = new ColumnValue(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Mutate.ColumnValue) + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required .Mutate.MutateType mutateType = 2; + public static final int MUTATETYPE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType mutateType_; + public boolean hasMutateType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType getMutateType() { + return mutateType_; + } + + // repeated .Mutate.ColumnValue columnValue = 3; + public static final int COLUMNVALUE_FIELD_NUMBER = 3; + private java.util.List columnValue_; + public java.util.List getColumnValueList() { + return columnValue_; + } + public java.util.List + getColumnValueOrBuilderList() { + return columnValue_; + } + public int getColumnValueCount() { + return columnValue_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getColumnValue(int index) { + return columnValue_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + int index) { + return columnValue_.get(index); + } + + // repeated .Attribute attribute = 4; + public static final int ATTRIBUTE_FIELD_NUMBER = 4; + private java.util.List attribute_; + public java.util.List getAttributeList() { + return attribute_; + } + public java.util.List + getAttributeOrBuilderList() { + return attribute_; + } + public int getAttributeCount() { + return attribute_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + return attribute_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + int index) { + return attribute_.get(index); + } + + // 
optional uint64 timestamp = 5; + public static final int TIMESTAMP_FIELD_NUMBER = 5; + private long timestamp_; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getTimestamp() { + return timestamp_; + } + + // optional uint64 lockId = 6; + public static final int LOCKID_FIELD_NUMBER = 6; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getLockId() { + return lockId_; + } + + // optional bool writeToWAL = 7 [default = true]; + public static final int WRITETOWAL_FIELD_NUMBER = 7; + private boolean writeToWAL_; + public boolean hasWriteToWAL() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public boolean getWriteToWAL() { + return writeToWAL_; + } + + // optional .TimeRange timeRange = 10; + public static final int TIMERANGE_FIELD_NUMBER = 10; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + return timeRange_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + return timeRange_; + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; + columnValue_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + timestamp_ = 0L; + lockId_ = 0L; + writeToWAL_ = true; + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMutateType()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getColumnValueCount(); i++) { + if (!getColumnValue(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, mutateType_.getNumber()); + } + for (int i = 0; i < columnValue_.size(); i++) { + output.writeMessage(3, columnValue_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + output.writeMessage(4, attribute_.get(i)); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(5, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(6, lockId_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBool(7, writeToWAL_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeMessage(10, timeRange_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) 
== 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, mutateType_.getNumber()); + } + for (int i = 0; i < columnValue_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, columnValue_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, attribute_.get(i)); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(5, timestamp_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(6, lockId_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(7, writeToWAL_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(10, timeRange_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasMutateType() == other.hasMutateType()); + if (hasMutateType()) { + result = result && + (getMutateType() == other.getMutateType()); + } + result = result && getColumnValueList() + .equals(other.getColumnValueList()); + result = result && getAttributeList() + .equals(other.getAttributeList()); + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && (hasWriteToWAL() == other.hasWriteToWAL()); + if (hasWriteToWAL()) { + result = result && (getWriteToWAL() + == other.getWriteToWAL()); + } + result = result && (hasTimeRange() == other.hasTimeRange()); + if (hasTimeRange()) { + result = result && getTimeRange() + .equals(other.getTimeRange()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasMutateType()) { + hash = (37 * hash) + MUTATETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getMutateType()); + } + if (getColumnValueCount() > 0) { + hash = (37 * hash) + COLUMNVALUE_FIELD_NUMBER; + hash = (53 * hash) + getColumnValueList().hashCode(); + } + if (getAttributeCount() > 0) { + hash = (37 * hash) + 
ATTRIBUTE_FIELD_NUMBER; + hash = (53 * hash) + getAttributeList().hashCode(); + } + if (hasTimestamp()) { + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getTimestamp()); + } + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + if (hasWriteToWAL()) { + hash = (37 * hash) + WRITETOWAL_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getWriteToWAL()); + } + if (hasTimeRange()) { + hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; + hash = (53 * hash) + getTimeRange().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public 
static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Mutate_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnValueFieldBuilder(); + getAttributeFieldBuilder(); + getTimeRangeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; + bitField0_ = (bitField0_ & ~0x00000002); + if (columnValueBuilder_ == null) { + columnValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + } else { + columnValueBuilder_.clear(); + } + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + attributeBuilder_.clear(); + } + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000010); + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000020); + writeToWAL_ = true; + bitField0_ = (bitField0_ & ~0x00000040); + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000080); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.mutateType_ = mutateType_; + if (columnValueBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004)) { + columnValue_ = java.util.Collections.unmodifiableList(columnValue_); + bitField0_ = (bitField0_ & ~0x00000004); + } + result.columnValue_ = columnValue_; + } else { + result.columnValue_ = columnValueBuilder_.build(); + } + if (attributeBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.attribute_ = attribute_; + } else { + result.attribute_ = attributeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000004; + } + result.timestamp_ = timestamp_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000008; + } + result.lockId_ = lockId_; + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000010; + } + result.writeToWAL_ = writeToWAL_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000020; + } + if (timeRangeBuilder_ == null) { + result.timeRange_ = timeRange_; + } else { + result.timeRange_ = timeRangeBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasMutateType()) { + setMutateType(other.getMutateType()); + } + if (columnValueBuilder_ == null) { + if (!other.columnValue_.isEmpty()) { + if (columnValue_.isEmpty()) { + columnValue_ = other.columnValue_; + bitField0_ = (bitField0_ & ~0x00000004); + } else { + ensureColumnValueIsMutable(); + columnValue_.addAll(other.columnValue_); + } + onChanged(); + } + } else { + if (!other.columnValue_.isEmpty()) { + if (columnValueBuilder_.isEmpty()) { + columnValueBuilder_.dispose(); + columnValueBuilder_ = null; + columnValue_ = other.columnValue_; + bitField0_ = (bitField0_ & ~0x00000004); + columnValueBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getColumnValueFieldBuilder() : null; + } else { + columnValueBuilder_.addAllMessages(other.columnValue_); + } + } + } + if (attributeBuilder_ == null) { + if (!other.attribute_.isEmpty()) { + if (attribute_.isEmpty()) { + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureAttributeIsMutable(); + attribute_.addAll(other.attribute_); + } + onChanged(); + } + } else { + if (!other.attribute_.isEmpty()) { + if (attributeBuilder_.isEmpty()) { + attributeBuilder_.dispose(); + attributeBuilder_ = null; + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000008); + attributeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getAttributeFieldBuilder() : null; + } else { + attributeBuilder_.addAllMessages(other.attribute_); + } + } + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + if (other.hasWriteToWAL()) { + setWriteToWAL(other.getWriteToWAL()); + } + if (other.hasTimeRange()) { + mergeTimeRange(other.getTimeRange()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasMutateType()) { + + return false; + } + for (int i = 0; i < getColumnValueCount(); i++) { + if (!getColumnValue(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType value = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + mutateType_ = value; + } + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addColumnValue(subBuilder.buildPartial()); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttribute(subBuilder.buildPartial()); + break; + } + case 40: { + bitField0_ |= 0x00000010; + timestamp_ = input.readUInt64(); + break; + } + case 48: { + bitField0_ |= 0x00000020; + lockId_ = input.readUInt64(); + break; + } + case 56: { + bitField0_ |= 0x00000040; + writeToWAL_ = input.readBool(); 
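+          // Note on the tag constants in this switch: the protobuf wire format encodes each
+          // tag as (fieldNumber << 3) | wireType, so case 56 here is field 7 (writeToWAL, varint)
+          // and case 82 below is field 10 (timeRange, a length-delimited embedded message).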
+ break; + } + case 82: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); + if (hasTimeRange()) { + subBuilder.mergeFrom(getTimeRange()); + } + input.readMessage(subBuilder, extensionRegistry); + setTimeRange(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required .Mutate.MutateType mutateType = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; + public boolean hasMutateType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType getMutateType() { + return mutateType_; + } + public Builder setMutateType(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + mutateType_ = value; + onChanged(); + return this; + } + public Builder clearMutateType() { + bitField0_ = (bitField0_ & ~0x00000002); + mutateType_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.MutateType.APPEND; + onChanged(); + return this; + } + + // repeated .Mutate.ColumnValue columnValue = 3; + private java.util.List columnValue_ = + java.util.Collections.emptyList(); + private void ensureColumnValueIsMutable() { + if (!((bitField0_ & 0x00000004) == 0x00000004)) { + columnValue_ = new java.util.ArrayList(columnValue_); + bitField0_ |= 0x00000004; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder> columnValueBuilder_; + + public java.util.List getColumnValueList() { + if (columnValueBuilder_ == null) { + return java.util.Collections.unmodifiableList(columnValue_); + } else { + return columnValueBuilder_.getMessageList(); + } + } + public int getColumnValueCount() { + if (columnValueBuilder_ == null) { + return columnValue_.size(); + } else { + return columnValueBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue getColumnValue(int index) { + if (columnValueBuilder_ == null) { + return columnValue_.get(index); + } else { + return columnValueBuilder_.getMessage(index); + } + } + public Builder setColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue value) { + if (columnValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnValueIsMutable(); + 
columnValue_.set(index, value); + onChanged(); + } else { + columnValueBuilder_.setMessage(index, value); + } + return this; + } + public Builder setColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder builderForValue) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.set(index, builderForValue.build()); + onChanged(); + } else { + columnValueBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addColumnValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue value) { + if (columnValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnValueIsMutable(); + columnValue_.add(value); + onChanged(); + } else { + columnValueBuilder_.addMessage(value); + } + return this; + } + public Builder addColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue value) { + if (columnValueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnValueIsMutable(); + columnValue_.add(index, value); + onChanged(); + } else { + columnValueBuilder_.addMessage(index, value); + } + return this; + } + public Builder addColumnValue( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder builderForValue) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.add(builderForValue.build()); + onChanged(); + } else { + columnValueBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addColumnValue( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder builderForValue) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.add(index, builderForValue.build()); + onChanged(); + } else { + columnValueBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllColumnValue( + java.lang.Iterable values) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + super.addAll(values, columnValue_); + onChanged(); + } else { + columnValueBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearColumnValue() { + if (columnValueBuilder_ == null) { + columnValue_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000004); + onChanged(); + } else { + columnValueBuilder_.clear(); + } + return this; + } + public Builder removeColumnValue(int index) { + if (columnValueBuilder_ == null) { + ensureColumnValueIsMutable(); + columnValue_.remove(index); + onChanged(); + } else { + columnValueBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder getColumnValueBuilder( + int index) { + return getColumnValueFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder getColumnValueOrBuilder( + int index) { + if (columnValueBuilder_ == null) { + return columnValue_.get(index); } else { + return columnValueBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getColumnValueOrBuilderList() { + if (columnValueBuilder_ != null) { + return columnValueBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(columnValue_); + } + } + public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder() { + return getColumnValueFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder addColumnValueBuilder( + int index) { + return getColumnValueFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.getDefaultInstance()); + } + public java.util.List + getColumnValueBuilderList() { + return getColumnValueFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder> + getColumnValueFieldBuilder() { + if (columnValueBuilder_ == null) { + columnValueBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValueOrBuilder>( + columnValue_, + ((bitField0_ & 0x00000004) == 0x00000004), + getParentForChildren(), + isClean()); + columnValue_ = null; + } + return columnValueBuilder_; + } + + // repeated .Attribute attribute = 4; + private java.util.List attribute_ = + java.util.Collections.emptyList(); + private void ensureAttributeIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + attribute_ = new java.util.ArrayList(attribute_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> attributeBuilder_; + + public java.util.List getAttributeList() { + if (attributeBuilder_ == null) { + return java.util.Collections.unmodifiableList(attribute_); + } else { + return attributeBuilder_.getMessageList(); + } + } + public int getAttributeCount() { + if (attributeBuilder_ == null) { + return attribute_.size(); + } else { + return attributeBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); + } else { + return attributeBuilder_.getMessage(index); + } + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.set(index, value); + onChanged(); + } else { + attributeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.set(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public 
Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(value); + onChanged(); + } else { + attributeBuilder_.addMessage(value); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(index, value); + onChanged(); + } else { + attributeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttribute( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttribute( + java.lang.Iterable values) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + super.addAll(values, attribute_); + onChanged(); + } else { + attributeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttribute() { + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + attributeBuilder_.clear(); + } + return this; + } + public Builder removeAttribute(int index) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.remove(index); + onChanged(); + } else { + attributeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder getAttributeBuilder( + int index) { + return getAttributeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); } else { + return attributeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributeOrBuilderList() { + if (attributeBuilder_ != null) { + return attributeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attribute_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder() { + return getAttributeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder( + int index) { + return getAttributeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + } + public java.util.List + getAttributeBuilderList() { + return getAttributeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> + getAttributeFieldBuilder() { + if (attributeBuilder_ == null) { + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder>( + attribute_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + attribute_ = null; + } + return attributeBuilder_; + } + + // optional uint64 timestamp = 5; + private long timestamp_ ; + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public long getTimestamp() { + return timestamp_; + } + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000010; + timestamp_ = value; + onChanged(); + return this; + } + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000010); + timestamp_ = 0L; + onChanged(); + return this; + } + + // optional uint64 lockId = 6; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000020; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000020); + lockId_ = 0L; + onChanged(); + return this; + } + + // optional bool writeToWAL = 7 [default = true]; + private boolean writeToWAL_ = true; + public boolean hasWriteToWAL() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public boolean getWriteToWAL() { + return writeToWAL_; + } + public Builder setWriteToWAL(boolean value) { + bitField0_ |= 0x00000040; + writeToWAL_ = value; + onChanged(); + return this; + } + public Builder clearWriteToWAL() { + bitField0_ = (bitField0_ & ~0x00000040); + writeToWAL_ = true; + onChanged(); + return this; + } + + // optional .TimeRange timeRange = 10; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + if (timeRangeBuilder_ == null) { + return timeRange_; + } else { + return timeRangeBuilder_.getMessage(); + } + } + public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + timeRange_ = value; + onChanged(); + } else { + timeRangeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000080; + return this; + } + public Builder setTimeRange( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { + if (timeRangeBuilder_ == null) { + timeRange_ = builderForValue.build(); + onChanged(); + } 
else { + timeRangeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000080; + return this; + } + public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (((bitField0_ & 0x00000080) == 0x00000080) && + timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { + timeRange_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); + } else { + timeRange_ = value; + } + onChanged(); + } else { + timeRangeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000080; + return this; + } + public Builder clearTimeRange() { + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + onChanged(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000080); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { + bitField0_ |= 0x00000080; + onChanged(); + return getTimeRangeFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + if (timeRangeBuilder_ != null) { + return timeRangeBuilder_.getMessageOrBuilder(); + } else { + return timeRange_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> + getTimeRangeFieldBuilder() { + if (timeRangeBuilder_ == null) { + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( + timeRange_, + getParentForChildren(), + isClean()); + timeRange_ = null; + } + return timeRangeBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Mutate) + } + + static { + defaultInstance = new Mutate(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Mutate) + } + + public interface MutateRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .Mutate mutate = 2; + boolean hasMutate(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getMutate(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder getMutateOrBuilder(); + + // optional .Condition condition = 3; + boolean hasCondition(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getCondition(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder getConditionOrBuilder(); + } + public static final class MutateRequest extends + com.google.protobuf.GeneratedMessage + implements MutateRequestOrBuilder { + // Use MutateRequest.newBuilder() to construct. 
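+  /*
+   * Illustrative sketch (not part of the protoc output): one way a caller could assemble a
+   * MutateRequest from these generated builders. The row value, the populated RegionSpecifier,
+   * and the choice of MutateType are placeholders -- APPEND is simply the enum value visible in
+   * this excerpt, and the RegionSpecifier fields are defined in hbase.proto, not shown here.
+   *
+   *   Mutate mutate = Mutate.newBuilder()
+   *       .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-1"))
+   *       .setMutateType(Mutate.MutateType.APPEND)
+   *       .setWriteToWAL(true)
+   *       .build();                        // row and mutateType are the required fields
+   *   MutateRequest request = MutateRequest.newBuilder()
+   *       .setRegion(regionSpecifier)      // a fully populated RegionSpecifier (see hbase.proto)
+   *       .setMutate(mutate)
+   *       .build();                        // region and mutate are required; condition is optional
+   */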
+ private MutateRequest(Builder builder) { + super(builder); + } + private MutateRequest(boolean noInit) {} + + private static final MutateRequest defaultInstance; + public static MutateRequest getDefaultInstance() { + return defaultInstance; + } + + public MutateRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .Mutate mutate = 2; + public static final int MUTATE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate mutate_; + public boolean hasMutate() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getMutate() { + return mutate_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder getMutateOrBuilder() { + return mutate_; + } + + // optional .Condition condition = 3; + public static final int CONDITION_FIELD_NUMBER = 3; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition condition_; + public boolean hasCondition() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getCondition() { + return condition_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder getConditionOrBuilder() { + return condition_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); + condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMutate()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getMutate().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (hasCondition()) { + if (!getCondition().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 
0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, mutate_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(3, condition_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, mutate_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(3, condition_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasMutate() == other.hasMutate()); + if (hasMutate()) { + result = result && getMutate() + .equals(other.getMutate()); + } + result = result && (hasCondition() == other.hasCondition()); + if (hasCondition()) { + result = result && getCondition() + .equals(other.getCondition()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasMutate()) { + hash = (37 * hash) + MUTATE_FIELD_NUMBER; + hash = (53 * hash) + getMutate().hashCode(); + } + if (hasCondition()) { + hash = (37 * hash) + CONDITION_FIELD_NUMBER; + hash = (53 * hash) + getCondition().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + 
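+  /*
+   * Illustrative sketch (not part of the protoc output): round-tripping a request through the
+   * byte[] overload above. toByteArray() is the standard protobuf Message serialization call;
+   * parseFrom() rethrows missing required fields as InvalidProtocolBufferException via buildParsed().
+   *
+   *   byte[] serialized = request.toByteArray();
+   *   MutateRequest copy = MutateRequest.parseFrom(serialized);
+   *   boolean same = copy.equals(request);   // value equality, as implemented above
+   */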
public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + 
maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getMutateFieldBuilder(); + getConditionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (mutateBuilder_ == null) { + mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); + } else { + mutateBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + if (conditionBuilder_ == null) { + condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); + } else { + conditionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (mutateBuilder_ == null) { + result.mutate_ = mutate_; + } else { + result.mutate_ = mutateBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + if (conditionBuilder_ == null) { + result.condition_ = condition_; + } else { + result.condition_ = conditionBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasMutate()) { + mergeMutate(other.getMutate()); + } + if (other.hasCondition()) { + mergeCondition(other.getCondition()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasMutate()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (!getMutate().isInitialized()) { + + return false; + } + if (hasCondition()) { + if (!getCondition().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.newBuilder(); + if (hasMutate()) { + subBuilder.mergeFrom(getMutate()); + } + input.readMessage(subBuilder, extensionRegistry); + setMutate(subBuilder.buildPartial()); + break; + } + case 26: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.newBuilder(); + if (hasCondition()) { + subBuilder.mergeFrom(getCondition()); + } + input.readMessage(subBuilder, extensionRegistry); + setCondition(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return 
region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .Mutate mutate = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder> mutateBuilder_; + public boolean hasMutate() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate getMutate() { + if (mutateBuilder_ == null) { + return mutate_; + } else { + return 
mutateBuilder_.getMessage(); + } + } + public Builder setMutate(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate value) { + if (mutateBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + mutate_ = value; + onChanged(); + } else { + mutateBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setMutate( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder builderForValue) { + if (mutateBuilder_ == null) { + mutate_ = builderForValue.build(); + onChanged(); + } else { + mutateBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeMutate(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate value) { + if (mutateBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + mutate_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance()) { + mutate_ = + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.newBuilder(mutate_).mergeFrom(value).buildPartial(); + } else { + mutate_ = value; + } + onChanged(); + } else { + mutateBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearMutate() { + if (mutateBuilder_ == null) { + mutate_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.getDefaultInstance(); + onChanged(); + } else { + mutateBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder getMutateBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getMutateFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder getMutateOrBuilder() { + if (mutateBuilder_ != null) { + return mutateBuilder_.getMessageOrBuilder(); + } else { + return mutate_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder> + getMutateFieldBuilder() { + if (mutateBuilder_ == null) { + mutateBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateOrBuilder>( + mutate_, + getParentForChildren(), + isClean()); + mutate_ = null; + } + return mutateBuilder_; + } + + // optional .Condition condition = 3; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder> conditionBuilder_; + public boolean hasCondition() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition getCondition() { + if (conditionBuilder_ == null) { + return condition_; + } else { + return conditionBuilder_.getMessage(); 
+ } + } + public Builder setCondition(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition value) { + if (conditionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + condition_ = value; + onChanged(); + } else { + conditionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder setCondition( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder builderForValue) { + if (conditionBuilder_ == null) { + condition_ = builderForValue.build(); + onChanged(); + } else { + conditionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder mergeCondition(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition value) { + if (conditionBuilder_ == null) { + if (((bitField0_ & 0x00000004) == 0x00000004) && + condition_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance()) { + condition_ = + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.newBuilder(condition_).mergeFrom(value).buildPartial(); + } else { + condition_ = value; + } + onChanged(); + } else { + conditionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000004; + return this; + } + public Builder clearCondition() { + if (conditionBuilder_ == null) { + condition_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.getDefaultInstance(); + onChanged(); + } else { + conditionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder getConditionBuilder() { + bitField0_ |= 0x00000004; + onChanged(); + return getConditionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder getConditionOrBuilder() { + if (conditionBuilder_ != null) { + return conditionBuilder_.getMessageOrBuilder(); + } else { + return condition_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder> + getConditionFieldBuilder() { + if (conditionBuilder_ == null) { + conditionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ConditionOrBuilder>( + condition_, + getParentForChildren(), + isClean()); + condition_ = null; + } + return conditionBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MutateRequest) + } + + static { + defaultInstance = new MutateRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MutateRequest) + } + + public interface MutateResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional .Result result = 1; + boolean hasResult(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder(); + + // optional bool processed = 2; + boolean hasProcessed(); + boolean getProcessed(); + } + public static final class MutateResponse extends + 
com.google.protobuf.GeneratedMessage + implements MutateResponseOrBuilder { + // Use MutateResponse.newBuilder() to construct. + private MutateResponse(Builder builder) { + super(builder); + } + private MutateResponse(boolean noInit) {} + + private static final MutateResponse defaultInstance; + public static MutateResponse getDefaultInstance() { + return defaultInstance; + } + + public MutateResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_fieldAccessorTable; + } + + private int bitField0_; + // optional .Result result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { + return result_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { + return result_; + } + + // optional bool processed = 2; + public static final int PROCESSED_FIELD_NUMBER = 2; + private boolean processed_; + public boolean hasProcessed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getProcessed() { + return processed_; + } + + private void initFields() { + result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + processed_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasResult()) { + if (!getResult().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(2, processed_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, processed_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse) obj; + + boolean result = true; + result = result && (hasResult() == other.hasResult()); + if (hasResult()) { + result = result && getResult() + .equals(other.getResult()); + } + result = result && (hasProcessed() == other.hasProcessed()); + if (hasProcessed()) { + result = result && (getProcessed() + == other.getProcessed()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasResult()) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResult().hashCode(); + } + if (hasProcessed()) { + hash = (37 * hash) + PROCESSED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getProcessed()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + 
com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MutateResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + processed_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse result = buildPartial(); 
+ if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (resultBuilder_ == null) { + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.processed_ = processed_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance()) return this; + if (other.hasResult()) { + mergeResult(other.getResult()); + } + if (other.hasProcessed()) { + setProcessed(other.getProcessed()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasResult()) { + if (!getResult().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(); + if (hasResult()) { + subBuilder.mergeFrom(getResult()); + } + input.readMessage(subBuilder, extensionRegistry); + setResult(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + processed_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional .Result result = 1; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> resultBuilder_; + public boolean hasResult() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + 
public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult() { + if (resultBuilder_ == null) { + return result_; + } else { + return resultBuilder_.getMessage(); + } + } + public Builder setResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + result_ = value; + onChanged(); + } else { + resultBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setResult( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + result_ = builderForValue.build(); + onChanged(); + } else { + resultBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + result_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()) { + result_ = + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(result_).mergeFrom(value).buildPartial(); + } else { + result_ = value; + } + onChanged(); + } else { + resultBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance(); + onChanged(); + } else { + resultBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder getResultBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getResultFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilder(); + } else { + return result_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder>( + result_, + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // optional bool processed = 2; + private boolean processed_ ; + public boolean hasProcessed() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getProcessed() { + return processed_; + } + public Builder setProcessed(boolean value) { + bitField0_ |= 0x00000002; + processed_ = value; + onChanged(); + return this; + } + public Builder clearProcessed() { + bitField0_ = (bitField0_ & ~0x00000002); + processed_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:MutateResponse) + } + + static { + defaultInstance = new MutateResponse(true); + defaultInstance.initFields(); + } + + // 
@@protoc_insertion_point(class_scope:MutateResponse) + } + + public interface ScanOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Column column = 1; + java.util.List + getColumnList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index); + int getColumnCount(); + java.util.List + getColumnOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index); + + // repeated .Attribute attribute = 2; + java.util.List + getAttributeList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index); + int getAttributeCount(); + java.util.List + getAttributeOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + int index); + + // optional bytes startRow = 3; + boolean hasStartRow(); + com.google.protobuf.ByteString getStartRow(); + + // optional bytes stopRow = 4; + boolean hasStopRow(); + com.google.protobuf.ByteString getStopRow(); + + // optional .Parameter filter = 5; + boolean hasFilter(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder(); + + // optional .TimeRange timeRange = 6; + boolean hasTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder(); + + // optional uint32 maxVersions = 7 [default = 1]; + boolean hasMaxVersions(); + int getMaxVersions(); + + // optional bool cacheBlocks = 8 [default = true]; + boolean hasCacheBlocks(); + boolean getCacheBlocks(); + + // optional uint32 rowsToCache = 9; + boolean hasRowsToCache(); + int getRowsToCache(); + + // optional uint32 batchSize = 10; + boolean hasBatchSize(); + int getBatchSize(); + } + public static final class Scan extends + com.google.protobuf.GeneratedMessage + implements ScanOrBuilder { + // Use Scan.newBuilder() to construct. 
+ private Scan(Builder builder) { + super(builder); + } + private Scan(boolean noInit) {} + + private static final Scan defaultInstance; + public static Scan getDefaultInstance() { + return defaultInstance; + } + + public Scan getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_fieldAccessorTable; + } + + private int bitField0_; + // repeated .Column column = 1; + public static final int COLUMN_FIELD_NUMBER = 1; + private java.util.List column_; + public java.util.List getColumnList() { + return column_; + } + public java.util.List + getColumnOrBuilderList() { + return column_; + } + public int getColumnCount() { + return column_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { + return column_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + return column_.get(index); + } + + // repeated .Attribute attribute = 2; + public static final int ATTRIBUTE_FIELD_NUMBER = 2; + private java.util.List attribute_; + public java.util.List getAttributeList() { + return attribute_; + } + public java.util.List + getAttributeOrBuilderList() { + return attribute_; + } + public int getAttributeCount() { + return attribute_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + return attribute_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + int index) { + return attribute_.get(index); + } + + // optional bytes startRow = 3; + public static final int STARTROW_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString startRow_; + public boolean hasStartRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getStartRow() { + return startRow_; + } + + // optional bytes stopRow = 4; + public static final int STOPROW_FIELD_NUMBER = 4; + private com.google.protobuf.ByteString stopRow_; + public boolean hasStopRow() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getStopRow() { + return stopRow_; + } + + // optional .Parameter filter = 5; + public static final int FILTER_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { + return filter_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { + return filter_; + } + + // optional .TimeRange timeRange = 6; + public static final int TIMERANGE_FIELD_NUMBER = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + return timeRange_; + } + public 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + return timeRange_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + public static final int MAXVERSIONS_FIELD_NUMBER = 7; + private int maxVersions_; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public int getMaxVersions() { + return maxVersions_; + } + + // optional bool cacheBlocks = 8 [default = true]; + public static final int CACHEBLOCKS_FIELD_NUMBER = 8; + private boolean cacheBlocks_; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + + // optional uint32 rowsToCache = 9; + public static final int ROWSTOCACHE_FIELD_NUMBER = 9; + private int rowsToCache_; + public boolean hasRowsToCache() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getRowsToCache() { + return rowsToCache_; + } + + // optional uint32 batchSize = 10; + public static final int BATCHSIZE_FIELD_NUMBER = 10; + private int batchSize_; + public boolean hasBatchSize() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public int getBatchSize() { + return batchSize_; + } + + private void initFields() { + column_ = java.util.Collections.emptyList(); + attribute_ = java.util.Collections.emptyList(); + startRow_ = com.google.protobuf.ByteString.EMPTY; + stopRow_ = com.google.protobuf.ByteString.EMPTY; + filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + maxVersions_ = 1; + cacheBlocks_ = true; + rowsToCache_ = 0; + batchSize_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < column_.size(); i++) { + output.writeMessage(1, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + output.writeMessage(2, attribute_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(3, startRow_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(4, stopRow_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeMessage(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeMessage(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeUInt32(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeBool(8, cacheBlocks_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + output.writeUInt32(9, rowsToCache_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + output.writeUInt32(10, batchSize_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int 
getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < column_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, column_.get(i)); + } + for (int i = 0; i < attribute_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, attribute_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, startRow_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, stopRow_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, filter_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, timeRange_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(7, maxVersions_); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(8, cacheBlocks_); + } + if (((bitField0_ & 0x00000040) == 0x00000040)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(9, rowsToCache_); + } + if (((bitField0_ & 0x00000080) == 0x00000080)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(10, batchSize_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan) obj; + + boolean result = true; + result = result && getColumnList() + .equals(other.getColumnList()); + result = result && getAttributeList() + .equals(other.getAttributeList()); + result = result && (hasStartRow() == other.hasStartRow()); + if (hasStartRow()) { + result = result && getStartRow() + .equals(other.getStartRow()); + } + result = result && (hasStopRow() == other.hasStopRow()); + if (hasStopRow()) { + result = result && getStopRow() + .equals(other.getStopRow()); + } + result = result && (hasFilter() == other.hasFilter()); + if (hasFilter()) { + result = result && getFilter() + .equals(other.getFilter()); + } + result = result && (hasTimeRange() == other.hasTimeRange()); + if (hasTimeRange()) { + result = result && getTimeRange() + .equals(other.getTimeRange()); + } + result = result && (hasMaxVersions() == other.hasMaxVersions()); + if (hasMaxVersions()) { + result = result && (getMaxVersions() + == other.getMaxVersions()); + } + result = result && (hasCacheBlocks() == other.hasCacheBlocks()); + if (hasCacheBlocks()) { + result = result && (getCacheBlocks() + == other.getCacheBlocks()); + } + result = result && (hasRowsToCache() == other.hasRowsToCache()); + if (hasRowsToCache()) { + result = result && (getRowsToCache() + == other.getRowsToCache()); + } + result = result && (hasBatchSize() == other.hasBatchSize()); + if 
(hasBatchSize()) { + result = result && (getBatchSize() + == other.getBatchSize()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getColumnCount() > 0) { + hash = (37 * hash) + COLUMN_FIELD_NUMBER; + hash = (53 * hash) + getColumnList().hashCode(); + } + if (getAttributeCount() > 0) { + hash = (37 * hash) + ATTRIBUTE_FIELD_NUMBER; + hash = (53 * hash) + getAttributeList().hashCode(); + } + if (hasStartRow()) { + hash = (37 * hash) + STARTROW_FIELD_NUMBER; + hash = (53 * hash) + getStartRow().hashCode(); + } + if (hasStopRow()) { + hash = (37 * hash) + STOPROW_FIELD_NUMBER; + hash = (53 * hash) + getStopRow().hashCode(); + } + if (hasFilter()) { + hash = (37 * hash) + FILTER_FIELD_NUMBER; + hash = (53 * hash) + getFilter().hashCode(); + } + if (hasTimeRange()) { + hash = (37 * hash) + TIMERANGE_FIELD_NUMBER; + hash = (53 * hash) + getTimeRange().hashCode(); + } + if (hasMaxVersions()) { + hash = (37 * hash) + MAXVERSIONS_FIELD_NUMBER; + hash = (53 * hash) + getMaxVersions(); + } + if (hasCacheBlocks()) { + hash = (37 * hash) + CACHEBLOCKS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCacheBlocks()); + } + if (hasRowsToCache()) { + hash = (37 * hash) + ROWSTOCACHE_FIELD_NUMBER; + hash = (53 * hash) + getRowsToCache(); + } + if (hasBatchSize()) { + hash = (37 * hash) + BATCHSIZE_FIELD_NUMBER; + hash = (53 * hash) + getBatchSize(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + 
} + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Scan_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getColumnFieldBuilder(); + getAttributeFieldBuilder(); + getFilterFieldBuilder(); + getTimeRangeFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + columnBuilder_.clear(); + } + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + attributeBuilder_.clear(); + } + startRow_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + stopRow_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + if (timeRangeBuilder_ == null) { + timeRange_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + maxVersions_ = 1; + bitField0_ = (bitField0_ & ~0x00000040); + cacheBlocks_ = true; + bitField0_ = (bitField0_ & ~0x00000080); + rowsToCache_ = 0; + bitField0_ = (bitField0_ & ~0x00000100); + batchSize_ = 0; + bitField0_ = (bitField0_ & ~0x00000200); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (columnBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + column_ = java.util.Collections.unmodifiableList(column_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.column_ = column_; + } else { + result.column_ = columnBuilder_.build(); + } + if (attributeBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + attribute_ = java.util.Collections.unmodifiableList(attribute_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.attribute_ = attribute_; + } else { + result.attribute_ = attributeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000001; + } + result.startRow_ = startRow_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000002; + } + result.stopRow_ = stopRow_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000004; + } + if (filterBuilder_ == null) { + result.filter_ = filter_; + } else { + result.filter_ = filterBuilder_.build(); + } + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000008; + } + if (timeRangeBuilder_ == null) { + result.timeRange_ = timeRange_; + } else { + result.timeRange_ = timeRangeBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000010; + } + result.maxVersions_ = maxVersions_; + if (((from_bitField0_ & 0x00000080) == 0x00000080)) { + to_bitField0_ |= 0x00000020; + } + result.cacheBlocks_ = cacheBlocks_; + if (((from_bitField0_ & 0x00000100) == 0x00000100)) { + to_bitField0_ |= 0x00000040; + } + result.rowsToCache_ = rowsToCache_; + if 
(((from_bitField0_ & 0x00000200) == 0x00000200)) { + to_bitField0_ |= 0x00000080; + } + result.batchSize_ = batchSize_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance()) return this; + if (columnBuilder_ == null) { + if (!other.column_.isEmpty()) { + if (column_.isEmpty()) { + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureColumnIsMutable(); + column_.addAll(other.column_); + } + onChanged(); + } + } else { + if (!other.column_.isEmpty()) { + if (columnBuilder_.isEmpty()) { + columnBuilder_.dispose(); + columnBuilder_ = null; + column_ = other.column_; + bitField0_ = (bitField0_ & ~0x00000001); + columnBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getColumnFieldBuilder() : null; + } else { + columnBuilder_.addAllMessages(other.column_); + } + } + } + if (attributeBuilder_ == null) { + if (!other.attribute_.isEmpty()) { + if (attribute_.isEmpty()) { + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureAttributeIsMutable(); + attribute_.addAll(other.attribute_); + } + onChanged(); + } + } else { + if (!other.attribute_.isEmpty()) { + if (attributeBuilder_.isEmpty()) { + attributeBuilder_.dispose(); + attributeBuilder_ = null; + attribute_ = other.attribute_; + bitField0_ = (bitField0_ & ~0x00000002); + attributeBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getAttributeFieldBuilder() : null; + } else { + attributeBuilder_.addAllMessages(other.attribute_); + } + } + } + if (other.hasStartRow()) { + setStartRow(other.getStartRow()); + } + if (other.hasStopRow()) { + setStopRow(other.getStopRow()); + } + if (other.hasFilter()) { + mergeFilter(other.getFilter()); + } + if (other.hasTimeRange()) { + mergeTimeRange(other.getTimeRange()); + } + if (other.hasMaxVersions()) { + setMaxVersions(other.getMaxVersions()); + } + if (other.hasCacheBlocks()) { + setCacheBlocks(other.getCacheBlocks()); + } + if (other.hasRowsToCache()) { + setRowsToCache(other.getRowsToCache()); + } + if (other.hasBatchSize()) { + setBatchSize(other.getBatchSize()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getColumnCount(); i++) { + if (!getColumn(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getAttributeCount(); i++) { + if (!getAttribute(i).isInitialized()) { + + return false; + } + } + if (hasFilter()) { + if (!getFilter().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addColumn(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addAttribute(subBuilder.buildPartial()); + break; + } + case 26: { + bitField0_ |= 0x00000004; + startRow_ = input.readBytes(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + stopRow_ = input.readBytes(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + if (hasFilter()) { + subBuilder.mergeFrom(getFilter()); + } + input.readMessage(subBuilder, extensionRegistry); + setFilter(subBuilder.buildPartial()); + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(); + if (hasTimeRange()) { + subBuilder.mergeFrom(getTimeRange()); + } + input.readMessage(subBuilder, extensionRegistry); + setTimeRange(subBuilder.buildPartial()); + break; + } + case 56: { + bitField0_ |= 0x00000040; + maxVersions_ = input.readUInt32(); + break; + } + case 64: { + bitField0_ |= 0x00000080; + cacheBlocks_ = input.readBool(); + break; + } + case 72: { + bitField0_ |= 0x00000100; + rowsToCache_ = input.readUInt32(); + break; + } + case 80: 
{ + bitField0_ |= 0x00000200; + batchSize_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Column column = 1; + private java.util.List column_ = + java.util.Collections.emptyList(); + private void ensureColumnIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + column_ = new java.util.ArrayList(column_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> columnBuilder_; + + public java.util.List getColumnList() { + if (columnBuilder_ == null) { + return java.util.Collections.unmodifiableList(column_); + } else { + return columnBuilder_.getMessageList(); + } + } + public int getColumnCount() { + if (columnBuilder_ == null) { + return column_.size(); + } else { + return columnBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column getColumn(int index) { + if (columnBuilder_ == null) { + return column_.get(index); + } else { + return columnBuilder_.getMessage(index); + } + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.set(index, value); + onChanged(); + } else { + columnBuilder_.setMessage(index, value); + } + return this; + } + public Builder setColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.set(index, builderForValue.build()); + onChanged(); + } else { + columnBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addColumn(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(value); + onChanged(); + } else { + columnBuilder_.addMessage(value); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column value) { + if (columnBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureColumnIsMutable(); + column_.add(index, value); + onChanged(); + } else { + columnBuilder_.addMessage(index, value); + } + return this; + } + public Builder addColumn( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(builderForValue.build()); + onChanged(); + } else { + columnBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addColumn( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder builderForValue) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.add(index, builderForValue.build()); + onChanged(); + } else { + columnBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllColumn( + java.lang.Iterable values) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + super.addAll(values, column_); + onChanged(); + } else { + 
columnBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearColumn() { + if (columnBuilder_ == null) { + column_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + columnBuilder_.clear(); + } + return this; + } + public Builder removeColumn(int index) { + if (columnBuilder_ == null) { + ensureColumnIsMutable(); + column_.remove(index); + onChanged(); + } else { + columnBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder getColumnBuilder( + int index) { + return getColumnFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder getColumnOrBuilder( + int index) { + if (columnBuilder_ == null) { + return column_.get(index); } else { + return columnBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getColumnOrBuilderList() { + if (columnBuilder_ != null) { + return columnBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(column_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder() { + return getColumnFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder addColumnBuilder( + int index) { + return getColumnFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.getDefaultInstance()); + } + public java.util.List + getColumnBuilderList() { + return getColumnFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder> + getColumnFieldBuilder() { + if (columnBuilder_ == null) { + columnBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ColumnOrBuilder>( + column_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + column_ = null; + } + return columnBuilder_; + } + + // repeated .Attribute attribute = 2; + private java.util.List attribute_ = + java.util.Collections.emptyList(); + private void ensureAttributeIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + attribute_ = new java.util.ArrayList(attribute_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> attributeBuilder_; + + public java.util.List getAttributeList() { + if (attributeBuilder_ == null) { + return java.util.Collections.unmodifiableList(attribute_); + } else { + return attributeBuilder_.getMessageList(); + } + } + public int getAttributeCount() { + if (attributeBuilder_ == null) { + return attribute_.size(); + } else { + return attributeBuilder_.getCount(); + } + } + public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute getAttribute(int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); + } else { + return attributeBuilder_.getMessage(index); + } + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.set(index, value); + onChanged(); + } else { + attributeBuilder_.setMessage(index, value); + } + return this; + } + public Builder setAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.set(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAttribute(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(value); + onChanged(); + } else { + attributeBuilder_.addMessage(value); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute value) { + if (attributeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureAttributeIsMutable(); + attribute_.add(index, value); + onChanged(); + } else { + attributeBuilder_.addMessage(index, value); + } + return this; + } + public Builder addAttribute( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addAttribute( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder builderForValue) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.add(index, builderForValue.build()); + onChanged(); + } else { + attributeBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllAttribute( + java.lang.Iterable values) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + super.addAll(values, attribute_); + onChanged(); + } else { + attributeBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearAttribute() { + if (attributeBuilder_ == null) { + attribute_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + attributeBuilder_.clear(); + } + return this; + } + public Builder removeAttribute(int index) { + if (attributeBuilder_ == null) { + ensureAttributeIsMutable(); + attribute_.remove(index); + onChanged(); + } else { + attributeBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder getAttributeBuilder( + int index) { + return getAttributeFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder getAttributeOrBuilder( + int index) { + if (attributeBuilder_ == null) { + return attribute_.get(index); } else { + return 
attributeBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getAttributeOrBuilderList() { + if (attributeBuilder_ != null) { + return attributeBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(attribute_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder() { + return getAttributeFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder addAttributeBuilder( + int index) { + return getAttributeFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.getDefaultInstance()); + } + public java.util.List + getAttributeBuilderList() { + return getAttributeFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder> + getAttributeFieldBuilder() { + if (attributeBuilder_ == null) { + attributeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.AttributeOrBuilder>( + attribute_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + attribute_ = null; + } + return attributeBuilder_; + } + + // optional bytes startRow = 3; + private com.google.protobuf.ByteString startRow_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasStartRow() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getStartRow() { + return startRow_; + } + public Builder setStartRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + startRow_ = value; + onChanged(); + return this; + } + public Builder clearStartRow() { + bitField0_ = (bitField0_ & ~0x00000004); + startRow_ = getDefaultInstance().getStartRow(); + onChanged(); + return this; + } + + // optional bytes stopRow = 4; + private com.google.protobuf.ByteString stopRow_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasStopRow() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public com.google.protobuf.ByteString getStopRow() { + return stopRow_; + } + public Builder setStopRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + stopRow_ = value; + onChanged(); + return this; + } + public Builder clearStopRow() { + bitField0_ = (bitField0_ & ~0x00000008); + stopRow_ = getDefaultInstance().getStopRow(); + onChanged(); + return this; + } + + // optional .Parameter filter = 5; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> filterBuilder_; + public boolean hasFilter() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getFilter() { + if (filterBuilder_ == null) { + return filter_; + } else { + return filterBuilder_.getMessage(); + } + } + public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (filterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + filter_ = value; + onChanged(); + } else { + filterBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setFilter( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (filterBuilder_ == null) { + filter_ = builderForValue.build(); + onChanged(); + } else { + filterBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (filterBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + filter_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) { + filter_ = + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(filter_).mergeFrom(value).buildPartial(); + } else { + filter_ = value; + } + onChanged(); + } else { + filterBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearFilter() { + if (filterBuilder_ == null) { + filter_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + onChanged(); + } else { + filterBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getFilterBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getFilterFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getFilterOrBuilder() { + if (filterBuilder_ != null) { + return filterBuilder_.getMessageOrBuilder(); + } else { + return filter_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> + getFilterFieldBuilder() { + if (filterBuilder_ == null) { + filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( + filter_, + getParentForChildren(), + isClean()); + filter_ = null; + } + return filterBuilder_; + } + + // optional .TimeRange timeRange = 6; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> timeRangeBuilder_; + public boolean hasTimeRange() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange getTimeRange() { + if (timeRangeBuilder_ == null) { + return timeRange_; + } else { + return timeRangeBuilder_.getMessage(); + } + } + public Builder setTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + timeRange_ = value; + onChanged(); + } else { + timeRangeBuilder_.setMessage(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder setTimeRange( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder builderForValue) { + if (timeRangeBuilder_ == null) { + timeRange_ = builderForValue.build(); + onChanged(); + } else { + timeRangeBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder mergeTimeRange(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange value) { + if (timeRangeBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020) && + timeRange_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance()) { + timeRange_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.newBuilder(timeRange_).mergeFrom(value).buildPartial(); + } else { + timeRange_ = value; + } + onChanged(); + } else { + timeRangeBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000020; + return this; + } + public Builder clearTimeRange() { + if (timeRangeBuilder_ == null) { + timeRange_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.getDefaultInstance(); + onChanged(); + } else { + timeRangeBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder getTimeRangeBuilder() { + bitField0_ |= 0x00000020; + onChanged(); + return getTimeRangeFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder getTimeRangeOrBuilder() { + if (timeRangeBuilder_ != null) { + return timeRangeBuilder_.getMessageOrBuilder(); + } else { + return timeRange_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder> + getTimeRangeFieldBuilder() { + if (timeRangeBuilder_ == null) { + timeRangeBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRange.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TimeRangeOrBuilder>( + timeRange_, + getParentForChildren(), + isClean()); + timeRange_ = null; + } + return timeRangeBuilder_; + } + + // optional uint32 maxVersions = 7 [default = 1]; + private int maxVersions_ = 1; + public boolean hasMaxVersions() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getMaxVersions() { + return maxVersions_; + } + public Builder setMaxVersions(int value) { + bitField0_ |= 0x00000040; + maxVersions_ = value; + onChanged(); + return this; + } + public Builder clearMaxVersions() { + bitField0_ = (bitField0_ & 
~0x00000040); + maxVersions_ = 1; + onChanged(); + return this; + } + + // optional bool cacheBlocks = 8 [default = true]; + private boolean cacheBlocks_ = true; + public boolean hasCacheBlocks() { + return ((bitField0_ & 0x00000080) == 0x00000080); + } + public boolean getCacheBlocks() { + return cacheBlocks_; + } + public Builder setCacheBlocks(boolean value) { + bitField0_ |= 0x00000080; + cacheBlocks_ = value; + onChanged(); + return this; + } + public Builder clearCacheBlocks() { + bitField0_ = (bitField0_ & ~0x00000080); + cacheBlocks_ = true; + onChanged(); + return this; + } + + // optional uint32 rowsToCache = 9; + private int rowsToCache_ ; + public boolean hasRowsToCache() { + return ((bitField0_ & 0x00000100) == 0x00000100); + } + public int getRowsToCache() { + return rowsToCache_; + } + public Builder setRowsToCache(int value) { + bitField0_ |= 0x00000100; + rowsToCache_ = value; + onChanged(); + return this; + } + public Builder clearRowsToCache() { + bitField0_ = (bitField0_ & ~0x00000100); + rowsToCache_ = 0; + onChanged(); + return this; + } + + // optional uint32 batchSize = 10; + private int batchSize_ ; + public boolean hasBatchSize() { + return ((bitField0_ & 0x00000200) == 0x00000200); + } + public int getBatchSize() { + return batchSize_; + } + public Builder setBatchSize(int value) { + bitField0_ |= 0x00000200; + batchSize_ = value; + onChanged(); + return this; + } + public Builder clearBatchSize() { + bitField0_ = (bitField0_ & ~0x00000200); + batchSize_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Scan) + } + + static { + defaultInstance = new Scan(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Scan) + } + + public interface ScanRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional uint64 scannerId = 1; + boolean hasScannerId(); + long getScannerId(); + + // optional .Scan scan = 2; + boolean hasScan(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getScan(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder getScanOrBuilder(); + + // optional uint32 numberOfRows = 3; + boolean hasNumberOfRows(); + int getNumberOfRows(); + + // optional bool closeScanner = 4; + boolean hasCloseScanner(); + boolean getCloseScanner(); + } + public static final class ScanRequest extends + com.google.protobuf.GeneratedMessage + implements ScanRequestOrBuilder { + // Use ScanRequest.newBuilder() to construct. 
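// Illustrative sketch, not protoc output: how a caller could use the generated builders
// above to assemble a Scan and wrap it in the ScanRequest defined next. The row keys,
// batch size, and row count below are hypothetical; only optional fields are set, so
// build() succeeds without further initialization.
//
//   import com.google.protobuf.ByteString;
//   import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan;
//   import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest;
//
//   Scan scan = Scan.newBuilder()
//       .setStartRow(ByteString.copyFromUtf8("row-000"))  // optional bytes startRow = 3
//       .setStopRow(ByteString.copyFromUtf8("row-999"))   // optional bytes stopRow = 4
//       .setMaxVersions(1)                                 // default = 1
//       .setCacheBlocks(true)                              // default = true
//       .setBatchSize(100)
//       .build();
//   ScanRequest request = ScanRequest.newBuilder()
//       .setScan(scan)
//       .setNumberOfRows(100)       // rows to return per scanner call
//       .setCloseScanner(false)
//       .build();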
+ private ScanRequest(Builder builder) { + super(builder); + } + private ScanRequest(boolean noInit) {} + + private static final ScanRequest defaultInstance; + public static ScanRequest getDefaultInstance() { + return defaultInstance; + } + + public ScanRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_fieldAccessorTable; + } + + private int bitField0_; + // optional uint64 scannerId = 1; + public static final int SCANNERID_FIELD_NUMBER = 1; + private long scannerId_; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getScannerId() { + return scannerId_; + } + + // optional .Scan scan = 2; + public static final int SCAN_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan scan_; + public boolean hasScan() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getScan() { + return scan_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder getScanOrBuilder() { + return scan_; + } + + // optional uint32 numberOfRows = 3; + public static final int NUMBEROFROWS_FIELD_NUMBER = 3; + private int numberOfRows_; + public boolean hasNumberOfRows() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public int getNumberOfRows() { + return numberOfRows_; + } + + // optional bool closeScanner = 4; + public static final int CLOSESCANNER_FIELD_NUMBER = 4; + private boolean closeScanner_; + public boolean hasCloseScanner() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getCloseScanner() { + return closeScanner_; + } + + private void initFields() { + scannerId_ = 0L; + scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); + numberOfRows_ = 0; + closeScanner_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (hasScan()) { + if (!getScan().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, scannerId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, scan_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt32(3, numberOfRows_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBool(4, closeScanner_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, scannerId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += 
com.google.protobuf.CodedOutputStream + .computeMessageSize(2, scan_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(3, numberOfRows_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(4, closeScanner_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest) obj; + + boolean result = true; + result = result && (hasScannerId() == other.hasScannerId()); + if (hasScannerId()) { + result = result && (getScannerId() + == other.getScannerId()); + } + result = result && (hasScan() == other.hasScan()); + if (hasScan()) { + result = result && getScan() + .equals(other.getScan()); + } + result = result && (hasNumberOfRows() == other.hasNumberOfRows()); + if (hasNumberOfRows()) { + result = result && (getNumberOfRows() + == other.getNumberOfRows()); + } + result = result && (hasCloseScanner() == other.hasCloseScanner()); + if (hasCloseScanner()) { + result = result && (getCloseScanner() + == other.getCloseScanner()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasScannerId()) { + hash = (37 * hash) + SCANNERID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getScannerId()); + } + if (hasScan()) { + hash = (37 * hash) + SCAN_FIELD_NUMBER; + hash = (53 * hash) + getScan().hashCode(); + } + if (hasNumberOfRows()) { + hash = (37 * hash) + NUMBEROFROWS_FIELD_NUMBER; + hash = (53 * hash) + getNumberOfRows(); + } + if (hasCloseScanner()) { + hash = (37 * hash) + CLOSESCANNER_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCloseScanner()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getScanFieldBuilder(); + } + } + private static Builder create() { + return 
new Builder(); + } + + public Builder clear() { + super.clear(); + scannerId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + numberOfRows_ = 0; + bitField0_ = (bitField0_ & ~0x00000004); + closeScanner_ = false; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.scannerId_ = scannerId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (scanBuilder_ == null) { + result.scan_ = scan_; + } else { + result.scan_ = scanBuilder_.build(); + } + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.numberOfRows_ = numberOfRows_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.closeScanner_ = closeScanner_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance()) return this; + if (other.hasScannerId()) { + setScannerId(other.getScannerId()); + } + if (other.hasScan()) { + mergeScan(other.getScan()); + } + if (other.hasNumberOfRows()) { + setNumberOfRows(other.getNumberOfRows()); + } + if (other.hasCloseScanner()) { + setCloseScanner(other.getCloseScanner()); + } + 
this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (hasScan()) { + if (!getScan().isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + scannerId_ = input.readUInt64(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.newBuilder(); + if (hasScan()) { + subBuilder.mergeFrom(getScan()); + } + input.readMessage(subBuilder, extensionRegistry); + setScan(subBuilder.buildPartial()); + break; + } + case 24: { + bitField0_ |= 0x00000004; + numberOfRows_ = input.readUInt32(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + closeScanner_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional uint64 scannerId = 1; + private long scannerId_ ; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getScannerId() { + return scannerId_; + } + public Builder setScannerId(long value) { + bitField0_ |= 0x00000001; + scannerId_ = value; + onChanged(); + return this; + } + public Builder clearScannerId() { + bitField0_ = (bitField0_ & ~0x00000001); + scannerId_ = 0L; + onChanged(); + return this; + } + + // optional .Scan scan = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder> scanBuilder_; + public boolean hasScan() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan getScan() { + if (scanBuilder_ == null) { + return scan_; + } else { + return scanBuilder_.getMessage(); + } + } + public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + scan_ = value; + onChanged(); + } else { + scanBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setScan( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder builderForValue) { + if (scanBuilder_ == null) { + scan_ = builderForValue.build(); + onChanged(); + } else { + scanBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (((bitField0_ & 
0x00000002) == 0x00000002) && + scan_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance()) { + scan_ = + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); + } else { + scan_ = value; + } + onChanged(); + } else { + scanBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearScan() { + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.getDefaultInstance(); + onChanged(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder getScanBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getScanFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder getScanOrBuilder() { + if (scanBuilder_ != null) { + return scanBuilder_.getMessageOrBuilder(); + } else { + return scan_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder> + getScanFieldBuilder() { + if (scanBuilder_ == null) { + scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanOrBuilder>( + scan_, + getParentForChildren(), + isClean()); + scan_ = null; + } + return scanBuilder_; + } + + // optional uint32 numberOfRows = 3; + private int numberOfRows_ ; + public boolean hasNumberOfRows() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public int getNumberOfRows() { + return numberOfRows_; + } + public Builder setNumberOfRows(int value) { + bitField0_ |= 0x00000004; + numberOfRows_ = value; + onChanged(); + return this; + } + public Builder clearNumberOfRows() { + bitField0_ = (bitField0_ & ~0x00000004); + numberOfRows_ = 0; + onChanged(); + return this; + } + + // optional bool closeScanner = 4; + private boolean closeScanner_ ; + public boolean hasCloseScanner() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public boolean getCloseScanner() { + return closeScanner_; + } + public Builder setCloseScanner(boolean value) { + bitField0_ |= 0x00000008; + closeScanner_ = value; + onChanged(); + return this; + } + public Builder clearCloseScanner() { + bitField0_ = (bitField0_ & ~0x00000008); + closeScanner_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ScanRequest) + } + + static { + defaultInstance = new ScanRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ScanRequest) + } + + public interface ScanResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Result result = 1; + java.util.List + getResultList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(int index); + int getResultCount(); + java.util.List + getResultOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder( + int index); + + // optional uint64 scannerId = 2; + boolean hasScannerId(); + long getScannerId(); + + // 
optional bool moreResults = 3; + boolean hasMoreResults(); + boolean getMoreResults(); + + // optional uint32 ttl = 4; + boolean hasTtl(); + int getTtl(); + } + public static final class ScanResponse extends + com.google.protobuf.GeneratedMessage + implements ScanResponseOrBuilder { + // Use ScanResponse.newBuilder() to construct. + private ScanResponse(Builder builder) { + super(builder); + } + private ScanResponse(boolean noInit) {} + + private static final ScanResponse defaultInstance; + public static ScanResponse getDefaultInstance() { + return defaultInstance; + } + + public ScanResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_fieldAccessorTable; + } + + private int bitField0_; + // repeated .Result result = 1; + public static final int RESULT_FIELD_NUMBER = 1; + private java.util.List result_; + public java.util.List getResultList() { + return result_; + } + public java.util.List + getResultOrBuilderList() { + return result_; + } + public int getResultCount() { + return result_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(int index) { + return result_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder( + int index) { + return result_.get(index); + } + + // optional uint64 scannerId = 2; + public static final int SCANNERID_FIELD_NUMBER = 2; + private long scannerId_; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getScannerId() { + return scannerId_; + } + + // optional bool moreResults = 3; + public static final int MORERESULTS_FIELD_NUMBER = 3; + private boolean moreResults_; + public boolean hasMoreResults() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getMoreResults() { + return moreResults_; + } + + // optional uint32 ttl = 4; + public static final int TTL_FIELD_NUMBER = 4; + private int ttl_; + public boolean hasTtl() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public int getTtl() { + return ttl_; + } + + private void initFields() { + result_ = java.util.Collections.emptyList(); + scannerId_ = 0L; + moreResults_ = false; + ttl_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < result_.size(); i++) { + output.writeMessage(1, result_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(2, scannerId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBool(3, moreResults_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt32(4, ttl_); + } + getUnknownFields().writeTo(output); + } + + private 
int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < result_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, result_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, scannerId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, moreResults_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(4, ttl_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse) obj; + + boolean result = true; + result = result && getResultList() + .equals(other.getResultList()); + result = result && (hasScannerId() == other.hasScannerId()); + if (hasScannerId()) { + result = result && (getScannerId() + == other.getScannerId()); + } + result = result && (hasMoreResults() == other.hasMoreResults()); + if (hasMoreResults()) { + result = result && (getMoreResults() + == other.getMoreResults()); + } + result = result && (hasTtl() == other.hasTtl()); + if (hasTtl()) { + result = result && (getTtl() + == other.getTtl()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResultCount() > 0) { + hash = (37 * hash) + RESULT_FIELD_NUMBER; + hash = (53 * hash) + getResultList().hashCode(); + } + if (hasScannerId()) { + hash = (37 * hash) + SCANNERID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getScannerId()); + } + if (hasMoreResults()) { + hash = (37 * hash) + MORERESULTS_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getMoreResults()); + } + if (hasTtl()) { + hash = (37 * hash) + TTL_FIELD_NUMBER; + hash = (53 * hash) + getTtl(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ScanResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + 
super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResultFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + resultBuilder_.clear(); + } + scannerId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + moreResults_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + ttl_ = 0; + bitField0_ = (bitField0_ & ~0x00000008); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (resultBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = java.util.Collections.unmodifiableList(result_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.result_ = result_; + } else { + result.result_ = resultBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.scannerId_ = scannerId_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000002; + } + result.moreResults_ = moreResults_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000004; + } + result.ttl_ = ttl_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance()) return this; + if (resultBuilder_ == null) { + if 
(!other.result_.isEmpty()) { + if (result_.isEmpty()) { + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResultIsMutable(); + result_.addAll(other.result_); + } + onChanged(); + } + } else { + if (!other.result_.isEmpty()) { + if (resultBuilder_.isEmpty()) { + resultBuilder_.dispose(); + resultBuilder_ = null; + result_ = other.result_; + bitField0_ = (bitField0_ & ~0x00000001); + resultBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getResultFieldBuilder() : null; + } else { + resultBuilder_.addAllMessages(other.result_); + } + } + } + if (other.hasScannerId()) { + setScannerId(other.getScannerId()); + } + if (other.hasMoreResults()) { + setMoreResults(other.getMoreResults()); + } + if (other.hasTtl()) { + setTtl(other.getTtl()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getResultCount(); i++) { + if (!getResult(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addResult(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + scannerId_ = input.readUInt64(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + moreResults_ = input.readBool(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + ttl_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Result result = 1; + private java.util.List result_ = + java.util.Collections.emptyList(); + private void ensureResultIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + result_ = new java.util.ArrayList(result_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> resultBuilder_; + + public java.util.List getResultList() { + if (resultBuilder_ == null) { + return java.util.Collections.unmodifiableList(result_); + } else { + return resultBuilder_.getMessageList(); + } + } + public int getResultCount() { + if (resultBuilder_ == null) { + return result_.size(); + } else { + return resultBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result getResult(int index) { + if (resultBuilder_ == null) { + return result_.get(index); + } else { + return resultBuilder_.getMessage(index); + } + } + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result 
value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.set(index, value); + onChanged(); + } else { + resultBuilder_.setMessage(index, value); + } + return this; + } + public Builder setResult( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.set(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addResult(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(value); + onChanged(); + } else { + resultBuilder_.addMessage(value); + } + return this; + } + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result value) { + if (resultBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResultIsMutable(); + result_.add(index, value); + onChanged(); + } else { + resultBuilder_.addMessage(index, value); + } + return this; + } + public Builder addResult( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addResult( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder builderForValue) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.add(index, builderForValue.build()); + onChanged(); + } else { + resultBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllResult( + java.lang.Iterable values) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + super.addAll(values, result_); + onChanged(); + } else { + resultBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearResult() { + if (resultBuilder_ == null) { + result_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + resultBuilder_.clear(); + } + return this; + } + public Builder removeResult(int index) { + if (resultBuilder_ == null) { + ensureResultIsMutable(); + result_.remove(index); + onChanged(); + } else { + resultBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder getResultBuilder( + int index) { + return getResultFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder getResultOrBuilder( + int index) { + if (resultBuilder_ == null) { + return result_.get(index); } else { + return resultBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getResultOrBuilderList() { + if (resultBuilder_ != null) { + return resultBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(result_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder addResultBuilder() { + return getResultFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()); + } 
+ public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder addResultBuilder( + int index) { + return getResultFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.getDefaultInstance()); + } + public java.util.List + getResultBuilderList() { + return getResultFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder> + getResultFieldBuilder() { + if (resultBuilder_ == null) { + resultBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ResultOrBuilder>( + result_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + result_ = null; + } + return resultBuilder_; + } + + // optional uint64 scannerId = 2; + private long scannerId_ ; + public boolean hasScannerId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getScannerId() { + return scannerId_; + } + public Builder setScannerId(long value) { + bitField0_ |= 0x00000002; + scannerId_ = value; + onChanged(); + return this; + } + public Builder clearScannerId() { + bitField0_ = (bitField0_ & ~0x00000002); + scannerId_ = 0L; + onChanged(); + return this; + } + + // optional bool moreResults = 3; + private boolean moreResults_ ; + public boolean hasMoreResults() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public boolean getMoreResults() { + return moreResults_; + } + public Builder setMoreResults(boolean value) { + bitField0_ |= 0x00000004; + moreResults_ = value; + onChanged(); + return this; + } + public Builder clearMoreResults() { + bitField0_ = (bitField0_ & ~0x00000004); + moreResults_ = false; + onChanged(); + return this; + } + + // optional uint32 ttl = 4; + private int ttl_ ; + public boolean hasTtl() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public int getTtl() { + return ttl_; + } + public Builder setTtl(int value) { + bitField0_ |= 0x00000008; + ttl_ = value; + onChanged(); + return this; + } + public Builder clearTtl() { + bitField0_ = (bitField0_ & ~0x00000008); + ttl_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:ScanResponse) + } + + static { + defaultInstance = new ScanResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ScanResponse) + } + + public interface LockRowRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // repeated bytes row = 2; + java.util.List getRowList(); + int getRowCount(); + com.google.protobuf.ByteString getRow(int index); + } + public static final class LockRowRequest extends + com.google.protobuf.GeneratedMessage + implements LockRowRequestOrBuilder { + // Use LockRowRequest.newBuilder() to construct. 
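+ // LockRowRequest pairs the target region (required .RegionSpecifier region = 1)
+ // with the row keys to lock (repeated bytes row = 2).  For illustration only,
+ // a client could assemble one as:
+ //   LockRowRequest request = LockRowRequest.newBuilder()
+ //       .setRegion(region)
+ //       .addRow(com.google.protobuf.ByteString.copyFromUtf8("row-1"))
+ //       .build();
+ // where "region" is a previously built RegionSpecifier.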
+ private LockRowRequest(Builder builder) { + super(builder); + } + private LockRowRequest(boolean noInit) {} + + private static final LockRowRequest defaultInstance; + public static LockRowRequest getDefaultInstance() { + return defaultInstance; + } + + public LockRowRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // repeated bytes row = 2; + public static final int ROW_FIELD_NUMBER = 2; + private java.util.List row_; + public java.util.List + getRowList() { + return row_; + } + public int getRowCount() { + return row_.size(); + } + public com.google.protobuf.ByteString getRow(int index) { + return row_.get(index); + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + row_ = java.util.Collections.emptyList();; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < row_.size(); i++) { + output.writeBytes(2, row_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + { + int dataSize = 0; + for (int i = 0; i < row_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(row_.get(i)); + } + size += dataSize; + size += 1 * getRowList().size(); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getRowList() + .equals(other.getRowList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getRowCount() > 0) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRowList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest 
parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + row_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + 
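+ // Assembles the message from the current builder state; a missing required
+ // field (here the region) is reported as an InvalidProtocolBufferException so
+ // the static parseFrom() helpers above surface it as a parse failure.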
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + row_ = java.util.Collections.unmodifiableList(row_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.row_ = row_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (!other.row_.isEmpty()) { + if (row_.isEmpty()) { + row_ = other.row_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureRowIsMutable(); + row_.addAll(other.row_); + } + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + ensureRowIsMutable(); + row_.add(input.readBytes()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated bytes row = 2; + private java.util.List row_ = java.util.Collections.emptyList();; + private void ensureRowIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + row_ = new java.util.ArrayList(row_); + bitField0_ |= 0x00000002; + } + } + public java.util.List + getRowList() { 
+ return java.util.Collections.unmodifiableList(row_); + } + public int getRowCount() { + return row_.size(); + } + public com.google.protobuf.ByteString getRow(int index) { + return row_.get(index); + } + public Builder setRow( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRowIsMutable(); + row_.set(index, value); + onChanged(); + return this; + } + public Builder addRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureRowIsMutable(); + row_.add(value); + onChanged(); + return this; + } + public Builder addAllRow( + java.lang.Iterable values) { + ensureRowIsMutable(); + super.addAll(values, row_); + onChanged(); + return this; + } + public Builder clearRow() { + row_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:LockRowRequest) + } + + static { + defaultInstance = new LockRowRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:LockRowRequest) + } + + public interface LockRowResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 lockId = 1; + boolean hasLockId(); + long getLockId(); + + // optional uint32 ttl = 2; + boolean hasTtl(); + int getTtl(); + } + public static final class LockRowResponse extends + com.google.protobuf.GeneratedMessage + implements LockRowResponseOrBuilder { + // Use LockRowResponse.newBuilder() to construct. + private LockRowResponse(Builder builder) { + super(builder); + } + private LockRowResponse(boolean noInit) {} + + private static final LockRowResponse defaultInstance; + public static LockRowResponse getDefaultInstance() { + return defaultInstance; + } + + public LockRowResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 lockId = 1; + public static final int LOCKID_FIELD_NUMBER = 1; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLockId() { + return lockId_; + } + + // optional uint32 ttl = 2; + public static final int TTL_FIELD_NUMBER = 2; + private int ttl_; + public boolean hasTtl() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getTtl() { + return ttl_; + } + + private void initFields() { + lockId_ = 0L; + ttl_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLockId()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, lockId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt32(2, ttl_); + } + 
getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, lockId_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(2, ttl_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse) obj; + + boolean result = true; + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && (hasTtl() == other.hasTtl()); + if (hasTtl()) { + result = result && (getTtl() + == other.getTtl()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + if (hasTtl()) { + hash = (37 * hash) + TTL_FIELD_NUMBER; + hash = (53 * hash) + getTtl(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_LockRowResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + ttl_ = 0; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDescriptor(); + } + + 
public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.lockId_ = lockId_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.ttl_ = ttl_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance()) return this; + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + if (other.hasTtl()) { + setTtl(other.getTtl()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLockId()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + lockId_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + ttl_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 lockId = 1; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLockId() { + return lockId_; + } + public Builder 
setLockId(long value) { + bitField0_ |= 0x00000001; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000001); + lockId_ = 0L; + onChanged(); + return this; + } + + // optional uint32 ttl = 2; + private int ttl_ ; + public boolean hasTtl() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public int getTtl() { + return ttl_; + } + public Builder setTtl(int value) { + bitField0_ |= 0x00000002; + ttl_ = value; + onChanged(); + return this; + } + public Builder clearTtl() { + bitField0_ = (bitField0_ & ~0x00000002); + ttl_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:LockRowResponse) + } + + static { + defaultInstance = new LockRowResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:LockRowResponse) + } + + public interface UnlockRowRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required uint64 lockId = 2; + boolean hasLockId(); + long getLockId(); + } + public static final class UnlockRowRequest extends + com.google.protobuf.GeneratedMessage + implements UnlockRowRequestOrBuilder { + // Use UnlockRowRequest.newBuilder() to construct. + private UnlockRowRequest(Builder builder) { + super(builder); + } + private UnlockRowRequest(boolean noInit) {} + + private static final UnlockRowRequest defaultInstance; + public static UnlockRowRequest getDefaultInstance() { + return defaultInstance; + } + + public UnlockRowRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required uint64 lockId = 2; + public static final int LOCKID_FIELD_NUMBER = 2; + private long lockId_; + public boolean hasLockId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getLockId() { + return lockId_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + lockId_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasLockId()) { + memoizedIsInitialized = 0; + return 
false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, lockId_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, lockId_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasLockId() == other.hasLockId()); + if (hasLockId()) { + result = result && (getLockId() + == other.getLockId()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasLockId()) { + hash = (37 * hash) + LOCKID_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLockId()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + 
getRegionFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + lockId_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.lockId_ = lockId_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasLockId()) { + setLockId(other.getLockId()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasLockId()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + lockId_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return 
getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required uint64 lockId = 2; + private long lockId_ ; + public boolean hasLockId() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getLockId() { + return lockId_; + } + public Builder setLockId(long value) { + bitField0_ |= 0x00000002; + lockId_ = value; + onChanged(); + return this; + } + public Builder clearLockId() { + bitField0_ = (bitField0_ & ~0x00000002); + lockId_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UnlockRowRequest) + } + + static { + defaultInstance = new UnlockRowRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UnlockRowRequest) + } + + public interface UnlockRowResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + } + public static final class UnlockRowResponse extends + com.google.protobuf.GeneratedMessage + implements UnlockRowResponseOrBuilder { + // Use UnlockRowResponse.newBuilder() to construct. 
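+ // UnlockRowResponse declares no fields of its own; it is an empty
+ // acknowledgement of an UnlockRowRequest (which carries the region and the
+ // lockId returned earlier in a LockRowResponse).  Only unknown fields, if
+ // any, are written by writeTo().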
+ private UnlockRowResponse(Builder builder) { + super(builder); + } + private UnlockRowResponse(boolean noInit) {} + + private static final UnlockRowResponse defaultInstance; + public static UnlockRowResponse getDefaultInstance() { + return defaultInstance; + } + + public UnlockRowResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; + } + + private void initFields() { + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse) obj; + + boolean result = true; + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_UnlockRowResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + 
} + + public Builder clear() { + super.clear(); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse(this); + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance()) return this; + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + } + } + } + + + // @@protoc_insertion_point(builder_scope:UnlockRowResponse) + } + + static { + defaultInstance = new UnlockRowResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UnlockRowResponse) + } + + public interface BulkLoadHFileRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
+
+    // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2;
+    java.util.List<org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath>
+        getFamilyPathList();
+    org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index);
+    int getFamilyPathCount();
+    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
+        getFamilyPathOrBuilderList();
+    org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
+        int index);
+  }
+  public static final class BulkLoadHFileRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements BulkLoadHFileRequestOrBuilder {
+    // Use BulkLoadHFileRequest.newBuilder() to construct.
+    private BulkLoadHFileRequest(Builder builder) {
+      super(builder);
+    }
+    private BulkLoadHFileRequest(boolean noInit) {}
+
+    private static final BulkLoadHFileRequest defaultInstance;
+    public static BulkLoadHFileRequest getDefaultInstance() {
+      return defaultInstance;
+    }
+
+    public BulkLoadHFileRequest getDefaultInstanceForType() {
+      return defaultInstance;
+    }
+
+    public static final com.google.protobuf.Descriptors.Descriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_descriptor;
+    }
+
+    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+        internalGetFieldAccessorTable() {
+      return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable;
+    }
+
+    public interface FamilyPathOrBuilder
+        extends com.google.protobuf.MessageOrBuilder {
+
+      // required bytes family = 1;
+      boolean hasFamily();
+      com.google.protobuf.ByteString getFamily();
+
+      // required string path = 2;
+      boolean hasPath();
+      String getPath();
+    }
+    public static final class FamilyPath extends
+        com.google.protobuf.GeneratedMessage
+        implements FamilyPathOrBuilder {
+      // Use FamilyPath.newBuilder() to construct.
+ private FamilyPath(Builder builder) { + super(builder); + } + private FamilyPath(boolean noInit) {} + + private static final FamilyPath defaultInstance; + public static FamilyPath getDefaultInstance() { + return defaultInstance; + } + + public FamilyPath getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required string path = 2; + public static final int PATH_FIELD_NUMBER = 2; + private java.lang.Object path_; + public boolean hasPath() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getPath() { + java.lang.Object ref = path_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + path_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getPathBytes() { + java.lang.Object ref = path_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + path_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + path_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasPath()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getPathBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getPathBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean 
equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath) obj; + + boolean result = true; + result = result && (hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasPath() == other.hasPath()); + if (hasPath()) { + result = result && getPath() + .equals(other.getPath()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasPath()) { + hash = (37 * hash) + PATH_FIELD_NUMBER; + hash = (53 * hash) + getPath().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseDelimitedFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + path_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath build() { + 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = family_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.path_ = path_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasPath()) { + setPath(other.getPath()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + if (!hasPath()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + path_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString 
value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required string path = 2; + private java.lang.Object path_ = ""; + public boolean hasPath() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getPath() { + java.lang.Object ref = path_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + path_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setPath(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + path_ = value; + onChanged(); + return this; + } + public Builder clearPath() { + bitField0_ = (bitField0_ & ~0x00000002); + path_ = getDefaultInstance().getPath(); + onChanged(); + return this; + } + void setPath(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + path_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest.FamilyPath) + } + + static { + defaultInstance = new FamilyPath(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest.FamilyPath) + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + public static final int FAMILYPATH_FIELD_NUMBER = 2; + private java.util.List familyPath_; + public java.util.List getFamilyPathList() { + return familyPath_; + } + public java.util.List + getFamilyPathOrBuilderList() { + return familyPath_; + } + public int getFamilyPathCount() { + return familyPath_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { + return familyPath_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index) { + return familyPath_.get(index); + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + familyPath_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getFamilyPathCount(); i++) { + if (!getFamilyPath(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if 
(((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + for (int i = 0; i < familyPath_.size(); i++) { + output.writeMessage(2, familyPath_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + for (int i = 0; i < familyPath_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, familyPath_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && getFamilyPathList() + .equals(other.getFamilyPathList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (getFamilyPathCount() > 0) { + hash = (37 * hash) + FAMILYPATH_FIELD_NUMBER; + hash = (53 * hash) + getFamilyPathList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom(java.io.InputStream input) + throws 
java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getFamilyPathFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (familyPathBuilder_ == null) { + familyPath_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + familyPathBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (familyPathBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + familyPath_ = java.util.Collections.unmodifiableList(familyPath_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.familyPath_ = familyPath_; + } else { + result.familyPath_ = familyPathBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (familyPathBuilder_ == null) { + if (!other.familyPath_.isEmpty()) { + if (familyPath_.isEmpty()) { + familyPath_ = other.familyPath_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureFamilyPathIsMutable(); + familyPath_.addAll(other.familyPath_); + } + onChanged(); + } + } else { + if 
(!other.familyPath_.isEmpty()) { + if (familyPathBuilder_.isEmpty()) { + familyPathBuilder_.dispose(); + familyPathBuilder_ = null; + familyPath_ = other.familyPath_; + bitField0_ = (bitField0_ & ~0x00000002); + familyPathBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getFamilyPathFieldBuilder() : null; + } else { + familyPathBuilder_.addAllMessages(other.familyPath_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + for (int i = 0; i < getFamilyPathCount(); i++) { + if (!getFamilyPath(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addFamilyPath(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + 
regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // repeated .BulkLoadHFileRequest.FamilyPath familyPath = 2; + private java.util.List familyPath_ = + java.util.Collections.emptyList(); + private void ensureFamilyPathIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + familyPath_ = new java.util.ArrayList(familyPath_); + bitField0_ |= 0x00000002; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_; + + public java.util.List getFamilyPathList() { + if (familyPathBuilder_ == null) { + return java.util.Collections.unmodifiableList(familyPath_); + } else { + return familyPathBuilder_.getMessageList(); + } + } + public int getFamilyPathCount() { + if (familyPathBuilder_ == null) { + return familyPath_.size(); + } else { + return familyPathBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) { + if (familyPathBuilder_ == null) { + return familyPath_.get(index); + } else { + return familyPathBuilder_.getMessage(index); + } + } + public Builder setFamilyPath( + int index, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.set(index, value); + onChanged(); + } else { + familyPathBuilder_.setMessage(index, value); + } + return this; + } + public Builder setFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.set(index, builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.add(value); + onChanged(); + } else { + familyPathBuilder_.addMessage(value); + } + return this; + } + public Builder addFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath value) { + if (familyPathBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureFamilyPathIsMutable(); + familyPath_.add(index, value); + onChanged(); + } else { + familyPathBuilder_.addMessage(index, value); + } + return this; + } + public Builder addFamilyPath( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.add(builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addFamilyPath( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.add(index, builderForValue.build()); + onChanged(); + } else { + familyPathBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllFamilyPath( + java.lang.Iterable values) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + super.addAll(values, familyPath_); + onChanged(); + } else { + familyPathBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearFamilyPath() { + if (familyPathBuilder_ == null) { + familyPath_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + familyPathBuilder_.clear(); + } + return this; + } + public Builder removeFamilyPath(int index) { + if (familyPathBuilder_ == null) { + ensureFamilyPathIsMutable(); + familyPath_.remove(index); + onChanged(); + } else { + familyPathBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder( + int index) { + return getFamilyPathFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder( + int index) { + if (familyPathBuilder_ == null) { + return familyPath_.get(index); } else { + return familyPathBuilder_.getMessageOrBuilder(index); + } + 
} + public java.util.List + getFamilyPathOrBuilderList() { + if (familyPathBuilder_ != null) { + return familyPathBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(familyPath_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() { + return getFamilyPathFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder( + int index) { + return getFamilyPathFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance()); + } + public java.util.List + getFamilyPathBuilderList() { + return getFamilyPathFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> + getFamilyPathFieldBuilder() { + if (familyPathBuilder_ == null) { + familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>( + familyPath_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + familyPath_ = null; + } + return familyPathBuilder_; + } + + // @@protoc_insertion_point(builder_scope:BulkLoadHFileRequest) + } + + static { + defaultInstance = new BulkLoadHFileRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BulkLoadHFileRequest) + } + + public interface BulkLoadHFileResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bool loaded = 1; + boolean hasLoaded(); + boolean getLoaded(); + } + public static final class BulkLoadHFileResponse extends + com.google.protobuf.GeneratedMessage + implements BulkLoadHFileResponseOrBuilder { + // Use BulkLoadHFileResponse.newBuilder() to construct. 
+ private BulkLoadHFileResponse(Builder builder) { + super(builder); + } + private BulkLoadHFileResponse(boolean noInit) {} + + private static final BulkLoadHFileResponse defaultInstance; + public static BulkLoadHFileResponse getDefaultInstance() { + return defaultInstance; + } + + public BulkLoadHFileResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bool loaded = 1; + public static final int LOADED_FIELD_NUMBER = 1; + private boolean loaded_; + public boolean hasLoaded() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getLoaded() { + return loaded_; + } + + private void initFields() { + loaded_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLoaded()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, loaded_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, loaded_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse) obj; + + boolean result = true; + result = result && (hasLoaded() == other.hasLoaded()); + if (hasLoaded()) { + result = result && (getLoaded() + == other.getLoaded()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLoaded()) { + hash = (37 * hash) + LOADED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getLoaded()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_BulkLoadHFileResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + loaded_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.loaded_ = loaded_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse other) { + if (other == 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance()) return this; + if (other.hasLoaded()) { + setLoaded(other.getLoaded()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLoaded()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + loaded_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // required bool loaded = 1; + private boolean loaded_ ; + public boolean hasLoaded() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getLoaded() { + return loaded_; + } + public Builder setLoaded(boolean value) { + bitField0_ |= 0x00000001; + loaded_ = value; + onChanged(); + return this; + } + public Builder clearLoaded() { + bitField0_ = (bitField0_ & ~0x00000001); + loaded_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:BulkLoadHFileResponse) + } + + static { + defaultInstance = new BulkLoadHFileResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:BulkLoadHFileResponse) + } + + public interface ParameterOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string type = 1; + boolean hasType(); + String getType(); + + // optional bytes binaryValue = 2; + boolean hasBinaryValue(); + com.google.protobuf.ByteString getBinaryValue(); + } + public static final class Parameter extends + com.google.protobuf.GeneratedMessage + implements ParameterOrBuilder { + // Use Parameter.newBuilder() to construct. 
+ private Parameter(Builder builder) { + super(builder); + } + private Parameter(boolean noInit) {} + + private static final Parameter defaultInstance; + public static Parameter getDefaultInstance() { + return defaultInstance; + } + + public Parameter getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_fieldAccessorTable; + } + + private int bitField0_; + // required string type = 1; + public static final int TYPE_FIELD_NUMBER = 1; + private java.lang.Object type_; + public boolean hasType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getType() { + java.lang.Object ref = type_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + type_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bytes binaryValue = 2; + public static final int BINARYVALUE_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString binaryValue_; + public boolean hasBinaryValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getBinaryValue() { + return binaryValue_; + } + + private void initFields() { + type_ = ""; + binaryValue_ = com.google.protobuf.ByteString.EMPTY; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasType()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getTypeBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, binaryValue_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getTypeBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, binaryValue_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + 
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter) obj; + + boolean result = true; + result = result && (hasType() == other.hasType()); + if (hasType()) { + result = result && getType() + .equals(other.getType()); + } + result = result && (hasBinaryValue() == other.hasBinaryValue()); + if (hasBinaryValue()) { + result = result && getBinaryValue() + .equals(other.getBinaryValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasType()) { + hash = (37 * hash) + TYPE_FIELD_NUMBER; + hash = (53 * hash) + getType().hashCode(); + } + if (hasBinaryValue()) { + hash = (37 * hash) + BINARYVALUE_FIELD_NUMBER; + hash = (53 * hash) + getBinaryValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Parameter_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + type_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + binaryValue_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return 
result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.type_ = type_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.binaryValue_ = binaryValue_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) return this; + if (other.hasType()) { + setType(other.getType()); + } + if (other.hasBinaryValue()) { + setBinaryValue(other.getBinaryValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasType()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + type_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + binaryValue_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string type = 1; + private java.lang.Object type_ = ""; + public boolean hasType() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getType() { + java.lang.Object ref = type_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + type_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setType(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + type_ = value; + onChanged(); + return this; + } + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000001); + type_ = getDefaultInstance().getType(); + onChanged(); + return this; + } + void setType(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + type_ = value; + onChanged(); + } + + // optional bytes binaryValue = 2; + private com.google.protobuf.ByteString binaryValue_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasBinaryValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getBinaryValue() { + return binaryValue_; + } + public Builder 
setBinaryValue(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + binaryValue_ = value; + onChanged(); + return this; + } + public Builder clearBinaryValue() { + bitField0_ = (bitField0_ & ~0x00000002); + binaryValue_ = getDefaultInstance().getBinaryValue(); + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:Parameter) + } + + static { + defaultInstance = new Parameter(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Parameter) + } + + public interface PropertyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string name = 1; + boolean hasName(); + String getName(); + + // required string value = 2; + boolean hasValue(); + String getValue(); + } + public static final class Property extends + com.google.protobuf.GeneratedMessage + implements PropertyOrBuilder { + // Use Property.newBuilder() to construct. + private Property(Builder builder) { + super(builder); + } + private Property(boolean noInit) {} + + private static final Property defaultInstance; + public static Property getDefaultInstance() { + return defaultInstance; + } + + public Property getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_fieldAccessorTable; + } + + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + name_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private java.lang.Object value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getValue() { + java.lang.Object ref = value_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + value_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getValueBytes() { + java.lang.Object ref = value_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + value_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + name_ = ""; + value_ = ""; + } + private byte memoizedIsInitialized = -1; + public final 
boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getValueBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getValueBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property) obj; + + boolean result = true; + result = result && (hasName() == other.hasName()); + if (hasName()) { + result = result && getName() + .equals(other.getName()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasName()) { + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Property_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if 
(com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + value_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.value_ = value_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance()) return this; + if (other.hasName()) { + setName(other.getName()); + } + if (other.hasValue()) { + setValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + if (!hasValue()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, 
tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + name_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + value_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required string name = 1; + private java.lang.Object name_ = ""; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + name_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; + } + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; + } + void setName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + } + + // required string value = 2; + private java.lang.Object value_ = ""; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getValue() { + java.lang.Object ref = value_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + value_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setValue(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + return this; + } + public Builder clearValue() { + bitField0_ = (bitField0_ & ~0x00000002); + value_ = getDefaultInstance().getValue(); + onChanged(); + return this; + } + void setValue(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + value_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:Property) + } + + static { + defaultInstance = new Property(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Property) + } + + public interface ExecOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes row = 1; + boolean hasRow(); + com.google.protobuf.ByteString getRow(); + + // required string protocolName = 2; + boolean hasProtocolName(); + String getProtocolName(); + + // required string methodName = 3; + boolean hasMethodName(); + String getMethodName(); + + // repeated .Property property = 4; + java.util.List + getPropertyList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getProperty(int index); + int getPropertyCount(); + java.util.List + getPropertyOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder getPropertyOrBuilder( + int index); + + // repeated .Parameter parameter = 5; + java.util.List + getParameterList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getParameter(int index); + int getParameterCount(); + java.util.List + getParameterOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getParameterOrBuilder( + int index); + } + public static final class Exec extends + com.google.protobuf.GeneratedMessage + implements ExecOrBuilder { + // Use Exec.newBuilder() to construct. 
+ private Exec(Builder builder) { + super(builder); + } + private Exec(boolean noInit) {} + + private static final Exec defaultInstance; + public static Exec getDefaultInstance() { + return defaultInstance; + } + + public Exec getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_fieldAccessorTable; + } + + private int bitField0_; + // required bytes row = 1; + public static final int ROW_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString row_; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + + // required string protocolName = 2; + public static final int PROTOCOLNAME_FIELD_NUMBER = 2; + private java.lang.Object protocolName_; + public boolean hasProtocolName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getProtocolName() { + java.lang.Object ref = protocolName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + protocolName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getProtocolNameBytes() { + java.lang.Object ref = protocolName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + protocolName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string methodName = 3; + public static final int METHODNAME_FIELD_NUMBER = 3; + private java.lang.Object methodName_; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + methodName_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // repeated .Property property = 4; + public static final int PROPERTY_FIELD_NUMBER = 4; + private java.util.List property_; + public java.util.List getPropertyList() { + return property_; + } + public java.util.List + getPropertyOrBuilderList() { + return property_; + } + public int getPropertyCount() { + return property_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getProperty(int index) { + return property_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder getPropertyOrBuilder( + int index) { + return property_.get(index); + } + + // repeated .Parameter parameter = 5; + public static final int 
PARAMETER_FIELD_NUMBER = 5; + private java.util.List parameter_; + public java.util.List getParameterList() { + return parameter_; + } + public java.util.List + getParameterOrBuilderList() { + return parameter_; + } + public int getParameterCount() { + return parameter_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getParameter(int index) { + return parameter_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getParameterOrBuilder( + int index) { + return parameter_.get(index); + } + + private void initFields() { + row_ = com.google.protobuf.ByteString.EMPTY; + protocolName_ = ""; + methodName_ = ""; + property_ = java.util.Collections.emptyList(); + parameter_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRow()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasProtocolName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMethodName()) { + memoizedIsInitialized = 0; + return false; + } + for (int i = 0; i < getPropertyCount(); i++) { + if (!getProperty(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getParameterCount(); i++) { + if (!getParameter(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getProtocolNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getMethodNameBytes()); + } + for (int i = 0; i < property_.size(); i++) { + output.writeMessage(4, property_.get(i)); + } + for (int i = 0; i < parameter_.size(); i++) { + output.writeMessage(5, parameter_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, row_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getProtocolNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getMethodNameBytes()); + } + for (int i = 0; i < property_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(4, property_.get(i)); + } + for (int i = 0; i < parameter_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, parameter_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec) obj; + + boolean result = true; + result = result && (hasRow() == other.hasRow()); + if (hasRow()) { + result = result && getRow() + .equals(other.getRow()); + } + result = result && (hasProtocolName() == other.hasProtocolName()); + if (hasProtocolName()) { + result = result && getProtocolName() + .equals(other.getProtocolName()); + } + result = result && (hasMethodName() == other.hasMethodName()); + if (hasMethodName()) { + result = result && getMethodName() + .equals(other.getMethodName()); + } + result = result && getPropertyList() + .equals(other.getPropertyList()); + result = result && getParameterList() + .equals(other.getParameterList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRow()) { + hash = (37 * hash) + ROW_FIELD_NUMBER; + hash = (53 * hash) + getRow().hashCode(); + } + if (hasProtocolName()) { + hash = (37 * hash) + PROTOCOLNAME_FIELD_NUMBER; + hash = (53 * hash) + getProtocolName().hashCode(); + } + if (hasMethodName()) { + hash = (37 * hash) + METHODNAME_FIELD_NUMBER; + hash = (53 * hash) + getMethodName().hashCode(); + } + if (getPropertyCount() > 0) { + hash = (37 * hash) + PROPERTY_FIELD_NUMBER; + hash = (53 * hash) + getPropertyList().hashCode(); + } + if (getParameterCount() > 0) { + hash = (37 * hash) + PARAMETER_FIELD_NUMBER; + hash = (53 * hash) + getParameterList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_Exec_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getPropertyFieldBuilder(); + getParameterFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + row_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + protocolName_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + methodName_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + if (propertyBuilder_ == null) { + property_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + } else { + propertyBuilder_.clear(); + } + if (parameterBuilder_ == null) { + parameter_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + } else { + parameterBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return 
create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.row_ = row_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.protocolName_ = protocolName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.methodName_ = methodName_; + if (propertyBuilder_ == null) { + if (((bitField0_ & 0x00000008) == 0x00000008)) { + property_ = java.util.Collections.unmodifiableList(property_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.property_ = property_; + } else { + result.property_ = propertyBuilder_.build(); + } + if (parameterBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010)) { + parameter_ = java.util.Collections.unmodifiableList(parameter_); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.parameter_ = parameter_; + } else { + result.parameter_ = parameterBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance()) return this; + if (other.hasRow()) { + setRow(other.getRow()); + } + if (other.hasProtocolName()) { + setProtocolName(other.getProtocolName()); + } + if (other.hasMethodName()) { + setMethodName(other.getMethodName()); + } + if (propertyBuilder_ == null) { + if (!other.property_.isEmpty()) { + if (property_.isEmpty()) { + property_ = other.property_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensurePropertyIsMutable(); + property_.addAll(other.property_); + } + onChanged(); + } + } else { + if (!other.property_.isEmpty()) { + if 
(propertyBuilder_.isEmpty()) { + propertyBuilder_.dispose(); + propertyBuilder_ = null; + property_ = other.property_; + bitField0_ = (bitField0_ & ~0x00000008); + propertyBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getPropertyFieldBuilder() : null; + } else { + propertyBuilder_.addAllMessages(other.property_); + } + } + } + if (parameterBuilder_ == null) { + if (!other.parameter_.isEmpty()) { + if (parameter_.isEmpty()) { + parameter_ = other.parameter_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { + ensureParameterIsMutable(); + parameter_.addAll(other.parameter_); + } + onChanged(); + } + } else { + if (!other.parameter_.isEmpty()) { + if (parameterBuilder_.isEmpty()) { + parameterBuilder_.dispose(); + parameterBuilder_ = null; + parameter_ = other.parameter_; + bitField0_ = (bitField0_ & ~0x00000010); + parameterBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getParameterFieldBuilder() : null; + } else { + parameterBuilder_.addAllMessages(other.parameter_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRow()) { + + return false; + } + if (!hasProtocolName()) { + + return false; + } + if (!hasMethodName()) { + + return false; + } + for (int i = 0; i < getPropertyCount(); i++) { + if (!getProperty(i).isInitialized()) { + + return false; + } + } + for (int i = 0; i < getParameterCount(); i++) { + if (!getParameter(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + row_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + protocolName_ = input.readBytes(); + break; + } + case 26: { + bitField0_ |= 0x00000004; + methodName_ = input.readBytes(); + break; + } + case 34: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addProperty(subBuilder.buildPartial()); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addParameter(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes row = 1; + private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRow() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRow() { + return row_; + } + public Builder setRow(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + row_ = 
value; + onChanged(); + return this; + } + public Builder clearRow() { + bitField0_ = (bitField0_ & ~0x00000001); + row_ = getDefaultInstance().getRow(); + onChanged(); + return this; + } + + // required string protocolName = 2; + private java.lang.Object protocolName_ = ""; + public boolean hasProtocolName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public String getProtocolName() { + java.lang.Object ref = protocolName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + protocolName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setProtocolName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + protocolName_ = value; + onChanged(); + return this; + } + public Builder clearProtocolName() { + bitField0_ = (bitField0_ & ~0x00000002); + protocolName_ = getDefaultInstance().getProtocolName(); + onChanged(); + return this; + } + void setProtocolName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000002; + protocolName_ = value; + onChanged(); + } + + // required string methodName = 3; + private java.lang.Object methodName_ = ""; + public boolean hasMethodName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public String getMethodName() { + java.lang.Object ref = methodName_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + methodName_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setMethodName(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + return this; + } + public Builder clearMethodName() { + bitField0_ = (bitField0_ & ~0x00000004); + methodName_ = getDefaultInstance().getMethodName(); + onChanged(); + return this; + } + void setMethodName(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000004; + methodName_ = value; + onChanged(); + } + + // repeated .Property property = 4; + private java.util.List property_ = + java.util.Collections.emptyList(); + private void ensurePropertyIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + property_ = new java.util.ArrayList(property_); + bitField0_ |= 0x00000008; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder> propertyBuilder_; + + public java.util.List getPropertyList() { + if (propertyBuilder_ == null) { + return java.util.Collections.unmodifiableList(property_); + } else { + return propertyBuilder_.getMessageList(); + } + } + public int getPropertyCount() { + if (propertyBuilder_ == null) { + return property_.size(); + } else { + return propertyBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property getProperty(int index) { + if (propertyBuilder_ == null) { + return property_.get(index); + } else { + return propertyBuilder_.getMessage(index); + } + } + public Builder setProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property value) { + if (propertyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePropertyIsMutable(); + property_.set(index, value); + onChanged(); + } else { + 
propertyBuilder_.setMessage(index, value); + } + return this; + } + public Builder setProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder builderForValue) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.set(index, builderForValue.build()); + onChanged(); + } else { + propertyBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addProperty(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property value) { + if (propertyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePropertyIsMutable(); + property_.add(value); + onChanged(); + } else { + propertyBuilder_.addMessage(value); + } + return this; + } + public Builder addProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property value) { + if (propertyBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensurePropertyIsMutable(); + property_.add(index, value); + onChanged(); + } else { + propertyBuilder_.addMessage(index, value); + } + return this; + } + public Builder addProperty( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder builderForValue) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.add(builderForValue.build()); + onChanged(); + } else { + propertyBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addProperty( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder builderForValue) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.add(index, builderForValue.build()); + onChanged(); + } else { + propertyBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllProperty( + java.lang.Iterable values) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + super.addAll(values, property_); + onChanged(); + } else { + propertyBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearProperty() { + if (propertyBuilder_ == null) { + property_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + } else { + propertyBuilder_.clear(); + } + return this; + } + public Builder removeProperty(int index) { + if (propertyBuilder_ == null) { + ensurePropertyIsMutable(); + property_.remove(index); + onChanged(); + } else { + propertyBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder getPropertyBuilder( + int index) { + return getPropertyFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder getPropertyOrBuilder( + int index) { + if (propertyBuilder_ == null) { + return property_.get(index); } else { + return propertyBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getPropertyOrBuilderList() { + if (propertyBuilder_ != null) { + return propertyBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(property_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder addPropertyBuilder() { + return getPropertyFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance()); + } + public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder addPropertyBuilder( + int index) { + return getPropertyFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.getDefaultInstance()); + } + public java.util.List + getPropertyBuilderList() { + return getPropertyFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder> + getPropertyFieldBuilder() { + if (propertyBuilder_ == null) { + propertyBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.PropertyOrBuilder>( + property_, + ((bitField0_ & 0x00000008) == 0x00000008), + getParentForChildren(), + isClean()); + property_ = null; + } + return propertyBuilder_; + } + + // repeated .Parameter parameter = 5; + private java.util.List parameter_ = + java.util.Collections.emptyList(); + private void ensureParameterIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + parameter_ = new java.util.ArrayList(parameter_); + bitField0_ |= 0x00000010; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> parameterBuilder_; + + public java.util.List getParameterList() { + if (parameterBuilder_ == null) { + return java.util.Collections.unmodifiableList(parameter_); + } else { + return parameterBuilder_.getMessageList(); + } + } + public int getParameterCount() { + if (parameterBuilder_ == null) { + return parameter_.size(); + } else { + return parameterBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getParameter(int index) { + if (parameterBuilder_ == null) { + return parameter_.get(index); + } else { + return parameterBuilder_.getMessage(index); + } + } + public Builder setParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (parameterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParameterIsMutable(); + parameter_.set(index, value); + onChanged(); + } else { + parameterBuilder_.setMessage(index, value); + } + return this; + } + public Builder setParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.set(index, builderForValue.build()); + onChanged(); + } else { + parameterBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addParameter(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (parameterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParameterIsMutable(); + parameter_.add(value); + onChanged(); + } else { + parameterBuilder_.addMessage(value); + } + return this; + } + public Builder addParameter( + int index, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (parameterBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureParameterIsMutable(); + parameter_.add(index, value); + onChanged(); + } else { + parameterBuilder_.addMessage(index, value); + } + return this; + } + public Builder addParameter( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.add(builderForValue.build()); + onChanged(); + } else { + parameterBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addParameter( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.add(index, builderForValue.build()); + onChanged(); + } else { + parameterBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllParameter( + java.lang.Iterable values) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + super.addAll(values, parameter_); + onChanged(); + } else { + parameterBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearParameter() { + if (parameterBuilder_ == null) { + parameter_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + } else { + parameterBuilder_.clear(); + } + return this; + } + public Builder removeParameter(int index) { + if (parameterBuilder_ == null) { + ensureParameterIsMutable(); + parameter_.remove(index); + onChanged(); + } else { + parameterBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getParameterBuilder( + int index) { + return getParameterFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getParameterOrBuilder( + int index) { + if (parameterBuilder_ == null) { + return parameter_.get(index); } else { + return parameterBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getParameterOrBuilderList() { + if (parameterBuilder_ != null) { + return parameterBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(parameter_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addParameterBuilder() { + return getParameterFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addParameterBuilder( + int index) { + return getParameterFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + } + public java.util.List + getParameterBuilderList() { + return getParameterFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> + getParameterFieldBuilder() { + if (parameterBuilder_ == null) { + parameterBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( + parameter_, + ((bitField0_ & 0x00000010) == 0x00000010), + getParentForChildren(), + isClean()); + parameter_ = null; + } + return parameterBuilder_; + } + + // @@protoc_insertion_point(builder_scope:Exec) + } + + static { + defaultInstance = new Exec(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:Exec) + } + + public interface ExecCoprocessorRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required .RegionSpecifier region = 1; + boolean hasRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); + + // required .Exec call = 2; + boolean hasCall(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getCall(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder getCallOrBuilder(); + } + public static final class ExecCoprocessorRequest extends + com.google.protobuf.GeneratedMessage + implements ExecCoprocessorRequestOrBuilder { + // Use ExecCoprocessorRequest.newBuilder() to construct. + private ExecCoprocessorRequest(Builder builder) { + super(builder); + } + private ExecCoprocessorRequest(boolean noInit) {} + + private static final ExecCoprocessorRequest defaultInstance; + public static ExecCoprocessorRequest getDefaultInstance() { + return defaultInstance; + } + + public ExecCoprocessorRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; + } + + private int bitField0_; + // required .RegionSpecifier region = 1; + public static final int REGION_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + return region_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + return region_; + } + + // required .Exec call = 2; + public static final int CALL_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec call_; + public boolean hasCall() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getCall() { + return call_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder getCallOrBuilder() { + return call_; + } + + private void initFields() { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final 
boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegion()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCall()) { + memoizedIsInitialized = 0; + return false; + } + if (!getRegion().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + if (!getCall().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, call_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, region_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, call_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest) obj; + + boolean result = true; + result = result && (hasRegion() == other.hasRegion()); + if (hasRegion()) { + result = result && getRegion() + .equals(other.getRegion()); + } + result = result && (hasCall() == other.hasCall()); + if (hasCall()) { + result = result && getCall() + .equals(other.getCall()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegion()) { + hash = (37 * hash) + REGION_FIELD_NUMBER; + hash = (53 * hash) + getRegion().hashCode(); + } + if (hasCall()) { + hash = (37 * hash) + CALL_FIELD_NUMBER; + hash = (53 * hash) + getCall().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRegionFieldBuilder(); + getCallFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (callBuilder_ == null) { + call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); + } else { + callBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (regionBuilder_ == null) { + result.region_ = region_; + } else { + result.region_ = regionBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (callBuilder_ == null) { + result.call_ = call_; + } else { + result.call_ = callBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance()) return this; + if (other.hasRegion()) { + mergeRegion(other.getRegion()); + } + if (other.hasCall()) { + mergeCall(other.getCall()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegion()) { + + return false; + } + if (!hasCall()) { + + return false; + } + if (!getRegion().isInitialized()) { + + return false; + } + if (!getCall().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(); + if (hasRegion()) { + subBuilder.mergeFrom(getRegion()); + } + input.readMessage(subBuilder, extensionRegistry); + setRegion(subBuilder.buildPartial()); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.newBuilder(); + if (hasCall()) { + subBuilder.mergeFrom(getCall()); + } + input.readMessage(subBuilder, extensionRegistry); + setCall(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required .RegionSpecifier region = 1; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; + public boolean hasRegion() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { + if (regionBuilder_ == null) { + return region_; + } else { + return regionBuilder_.getMessage(); + } + } + public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + region_ = value; + onChanged(); + } else { + regionBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder setRegion( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { + 
if (regionBuilder_ == null) { + region_ = builderForValue.build(); + onChanged(); + } else { + regionBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { + if (regionBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { + region_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); + } else { + region_ = value; + } + onChanged(); + } else { + regionBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + public Builder clearRegion() { + if (regionBuilder_ == null) { + region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + onChanged(); + } else { + regionBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getRegionFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { + if (regionBuilder_ != null) { + return regionBuilder_.getMessageOrBuilder(); + } else { + return region_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> + getRegionFieldBuilder() { + if (regionBuilder_ == null) { + regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( + region_, + getParentForChildren(), + isClean()); + region_ = null; + } + return regionBuilder_; + } + + // required .Exec call = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder> callBuilder_; + public boolean hasCall() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec getCall() { + if (callBuilder_ == null) { + return call_; + } else { + return callBuilder_.getMessage(); + } + } + public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec value) { + if (callBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + call_ = value; + onChanged(); + } else { + callBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setCall( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder builderForValue) { + if (callBuilder_ == null) { + call_ = builderForValue.build(); + onChanged(); + } else { + 
callBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec value) { + if (callBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + call_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance()) { + call_ = + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.newBuilder(call_).mergeFrom(value).buildPartial(); + } else { + call_ = value; + } + onChanged(); + } else { + callBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearCall() { + if (callBuilder_ == null) { + call_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.getDefaultInstance(); + onChanged(); + } else { + callBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder getCallBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getCallFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder getCallOrBuilder() { + if (callBuilder_ != null) { + return callBuilder_.getMessageOrBuilder(); + } else { + return call_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder> + getCallFieldBuilder() { + if (callBuilder_ == null) { + callBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecOrBuilder>( + call_, + getParentForChildren(), + isClean()); + call_ = null; + } + return callBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ExecCoprocessorRequest) + } + + static { + defaultInstance = new ExecCoprocessorRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ExecCoprocessorRequest) + } + + public interface ExecCoprocessorResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes regionName = 1; + boolean hasRegionName(); + com.google.protobuf.ByteString getRegionName(); + + // required .Parameter value = 2; + boolean hasValue(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getValue(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getValueOrBuilder(); + } + public static final class ExecCoprocessorResponse extends + com.google.protobuf.GeneratedMessage + implements ExecCoprocessorResponseOrBuilder { + // Use ExecCoprocessorResponse.newBuilder() to construct. 
+ private ExecCoprocessorResponse(Builder builder) { + super(builder); + } + private ExecCoprocessorResponse(boolean noInit) {} + + private static final ExecCoprocessorResponse defaultInstance; + public static ExecCoprocessorResponse getDefaultInstance() { + return defaultInstance; + } + + public ExecCoprocessorResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; + } + + private int bitField0_; + // required bytes regionName = 1; + public static final int REGIONNAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString regionName_; + public boolean hasRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRegionName() { + return regionName_; + } + + // required .Parameter value = 2; + public static final int VALUE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getValue() { + return value_; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getValueOrBuilder() { + return value_; + } + + private void initFields() { + regionName_ = com.google.protobuf.ByteString.EMPTY; + value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRegionName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasValue()) { + memoizedIsInitialized = 0; + return false; + } + if (!getValue().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, regionName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeMessage(2, value_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, regionName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(2, value_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; 
+ } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse) obj; + + boolean result = true; + result = result && (hasRegionName() == other.hasRegionName()); + if (hasRegionName()) { + result = result && getRegionName() + .equals(other.getRegionName()); + } + result = result && (hasValue() == other.hasValue()); + if (hasValue()) { + result = result && getValue() + .equals(other.getValue()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRegionName()) { + hash = (37 * hash) + REGIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getRegionName().hashCode(); + } + if (hasValue()) { + hash = (37 * hash) + VALUE_FIELD_NUMBER; + hash = (53 * hash) + getValue().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = 
newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getValueFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + regionName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse result = 
buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.regionName_ = regionName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + if (valueBuilder_ == null) { + result.value_ = value_; + } else { + result.value_ = valueBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance()) return this; + if (other.hasRegionName()) { + setRegionName(other.getRegionName()); + } + if (other.hasValue()) { + mergeValue(other.getValue()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRegionName()) { + + return false; + } + if (!hasValue()) { + + return false; + } + if (!getValue().isInitialized()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + regionName_ = input.readBytes(); + break; + } + case 18: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + if (hasValue()) { + subBuilder.mergeFrom(getValue()); + } + input.readMessage(subBuilder, extensionRegistry); + setValue(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // required bytes regionName = 1; + private 
com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getRegionName() { + return regionName_; + } + public Builder setRegionName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + regionName_ = value; + onChanged(); + return this; + } + public Builder clearRegionName() { + bitField0_ = (bitField0_ & ~0x00000001); + regionName_ = getDefaultInstance().getRegionName(); + onChanged(); + return this; + } + + // required .Parameter value = 2; + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> valueBuilder_; + public boolean hasValue() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getValue() { + if (valueBuilder_ == null) { + return value_; + } else { + return valueBuilder_.getMessage(); + } + } + public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (valueBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + value_ = value; + onChanged(); + } else { + valueBuilder_.setMessage(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder setValue( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (valueBuilder_ == null) { + value_ = builderForValue.build(); + onChanged(); + } else { + valueBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (valueBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002) && + value_ != org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()) { + value_ = + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(value_).mergeFrom(value).buildPartial(); + } else { + value_ = value; + } + onChanged(); + } else { + valueBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000002; + return this; + } + public Builder clearValue() { + if (valueBuilder_ == null) { + value_ = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance(); + onChanged(); + } else { + valueBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getValueBuilder() { + bitField0_ |= 0x00000002; + onChanged(); + return getValueFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getValueOrBuilder() { + if (valueBuilder_ != null) { + return valueBuilder_.getMessageOrBuilder(); + } else { + return value_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> + getValueFieldBuilder() { + if (valueBuilder_ == null) { + valueBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( + value_, + getParentForChildren(), + isClean()); + value_ = null; + } + return valueBuilder_; + } + + // @@protoc_insertion_point(builder_scope:ExecCoprocessorResponse) + } + + static { + defaultInstance = new ExecCoprocessorResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:ExecCoprocessorResponse) + } + + public interface MultiRequestOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Parameter request = 1; + java.util.List + getRequestList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getRequest(int index); + int getRequestCount(); + java.util.List + getRequestOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getRequestOrBuilder( + int index); + + // optional bool atomic = 2; + boolean hasAtomic(); + boolean getAtomic(); + } + public static final class MultiRequest extends + com.google.protobuf.GeneratedMessage + implements MultiRequestOrBuilder { + // Use MultiRequest.newBuilder() to construct. + private MultiRequest(Builder builder) { + super(builder); + } + private MultiRequest(boolean noInit) {} + + private static final MultiRequest defaultInstance; + public static MultiRequest getDefaultInstance() { + return defaultInstance; + } + + public MultiRequest getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_fieldAccessorTable; + } + + private int bitField0_; + // repeated .Parameter request = 1; + public static final int REQUEST_FIELD_NUMBER = 1; + private java.util.List request_; + public java.util.List getRequestList() { + return request_; + } + public java.util.List + getRequestOrBuilderList() { + return request_; + } + public int getRequestCount() { + return request_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getRequest(int index) { + return request_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getRequestOrBuilder( + int index) { + return request_.get(index); + } + + // optional bool atomic = 2; + public static final int ATOMIC_FIELD_NUMBER = 2; + private boolean atomic_; + public boolean hasAtomic() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getAtomic() { + return atomic_; + } + + private void initFields() { + request_ = java.util.Collections.emptyList(); + atomic_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int 
i = 0; i < getRequestCount(); i++) { + if (!getRequest(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < request_.size(); i++) { + output.writeMessage(1, request_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(2, atomic_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < request_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, request_.get(i)); + } + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(2, atomic_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest) obj; + + boolean result = true; + result = result && getRequestList() + .equals(other.getRequestList()); + result = result && (hasAtomic() == other.hasAtomic()); + if (hasAtomic()) { + result = result && (getAtomic() + == other.getAtomic()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getRequestCount() > 0) { + hash = (37 * hash) + REQUEST_FIELD_NUMBER; + hash = (53 * hash) + getRequestList().hashCode(); + } + if (hasAtomic()) { + hash = (37 * hash) + ATOMIC_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getAtomic()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return 
newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiRequest_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getRequestFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + 
super.clear(); + if (requestBuilder_ == null) { + request_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + requestBuilder_.clear(); + } + atomic_ = false; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (requestBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + request_ = java.util.Collections.unmodifiableList(request_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.request_ = request_; + } else { + result.request_ = requestBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000001; + } + result.atomic_ = atomic_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance()) return this; + if (requestBuilder_ == null) { + if (!other.request_.isEmpty()) { + if (request_.isEmpty()) { + request_ = other.request_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureRequestIsMutable(); + request_.addAll(other.request_); + } + onChanged(); + } + } else { + if (!other.request_.isEmpty()) { + if (requestBuilder_.isEmpty()) { + requestBuilder_.dispose(); + requestBuilder_ = null; + request_ = other.request_; + bitField0_ = (bitField0_ & ~0x00000001); + requestBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
+ getRequestFieldBuilder() : null; + } else { + requestBuilder_.addAllMessages(other.request_); + } + } + } + if (other.hasAtomic()) { + setAtomic(other.getAtomic()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getRequestCount(); i++) { + if (!getRequest(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addRequest(subBuilder.buildPartial()); + break; + } + case 16: { + bitField0_ |= 0x00000002; + atomic_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Parameter request = 1; + private java.util.List request_ = + java.util.Collections.emptyList(); + private void ensureRequestIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + request_ = new java.util.ArrayList(request_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> requestBuilder_; + + public java.util.List getRequestList() { + if (requestBuilder_ == null) { + return java.util.Collections.unmodifiableList(request_); + } else { + return requestBuilder_.getMessageList(); + } + } + public int getRequestCount() { + if (requestBuilder_ == null) { + return request_.size(); + } else { + return requestBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getRequest(int index) { + if (requestBuilder_ == null) { + return request_.get(index); + } else { + return requestBuilder_.getMessage(index); + } + } + public Builder setRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (requestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRequestIsMutable(); + request_.set(index, value); + onChanged(); + } else { + requestBuilder_.setMessage(index, value); + } + return this; + } + public Builder setRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (requestBuilder_ == null) { + ensureRequestIsMutable(); + request_.set(index, builderForValue.build()); + onChanged(); + } else { + requestBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addRequest(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (requestBuilder_ == null) { + if (value == 
null) { + throw new NullPointerException(); + } + ensureRequestIsMutable(); + request_.add(value); + onChanged(); + } else { + requestBuilder_.addMessage(value); + } + return this; + } + public Builder addRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (requestBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureRequestIsMutable(); + request_.add(index, value); + onChanged(); + } else { + requestBuilder_.addMessage(index, value); + } + return this; + } + public Builder addRequest( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (requestBuilder_ == null) { + ensureRequestIsMutable(); + request_.add(builderForValue.build()); + onChanged(); + } else { + requestBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addRequest( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (requestBuilder_ == null) { + ensureRequestIsMutable(); + request_.add(index, builderForValue.build()); + onChanged(); + } else { + requestBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllRequest( + java.lang.Iterable values) { + if (requestBuilder_ == null) { + ensureRequestIsMutable(); + super.addAll(values, request_); + onChanged(); + } else { + requestBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearRequest() { + if (requestBuilder_ == null) { + request_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + requestBuilder_.clear(); + } + return this; + } + public Builder removeRequest(int index) { + if (requestBuilder_ == null) { + ensureRequestIsMutable(); + request_.remove(index); + onChanged(); + } else { + requestBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getRequestBuilder( + int index) { + return getRequestFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getRequestOrBuilder( + int index) { + if (requestBuilder_ == null) { + return request_.get(index); } else { + return requestBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getRequestOrBuilderList() { + if (requestBuilder_ != null) { + return requestBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(request_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addRequestBuilder() { + return getRequestFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addRequestBuilder( + int index) { + return getRequestFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + } + public java.util.List + getRequestBuilderList() { + return getRequestFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> + 
getRequestFieldBuilder() { + if (requestBuilder_ == null) { + requestBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( + request_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + request_ = null; + } + return requestBuilder_; + } + + // optional bool atomic = 2; + private boolean atomic_ ; + public boolean hasAtomic() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public boolean getAtomic() { + return atomic_; + } + public Builder setAtomic(boolean value) { + bitField0_ |= 0x00000002; + atomic_ = value; + onChanged(); + return this; + } + public Builder clearAtomic() { + bitField0_ = (bitField0_ & ~0x00000002); + atomic_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:MultiRequest) + } + + static { + defaultInstance = new MultiRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiRequest) + } + + public interface MultiResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // repeated .Parameter response = 1; + java.util.List + getResponseList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getResponse(int index); + int getResponseCount(); + java.util.List + getResponseOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getResponseOrBuilder( + int index); + } + public static final class MultiResponse extends + com.google.protobuf.GeneratedMessage + implements MultiResponseOrBuilder { + // Use MultiResponse.newBuilder() to construct. 
+ private MultiResponse(Builder builder) { + super(builder); + } + private MultiResponse(boolean noInit) {} + + private static final MultiResponse defaultInstance; + public static MultiResponse getDefaultInstance() { + return defaultInstance; + } + + public MultiResponse getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_fieldAccessorTable; + } + + // repeated .Parameter response = 1; + public static final int RESPONSE_FIELD_NUMBER = 1; + private java.util.List response_; + public java.util.List getResponseList() { + return response_; + } + public java.util.List + getResponseOrBuilderList() { + return response_; + } + public int getResponseCount() { + return response_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getResponse(int index) { + return response_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getResponseOrBuilder( + int index) { + return response_.get(index); + } + + private void initFields() { + response_ = java.util.Collections.emptyList(); + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + for (int i = 0; i < getResponseCount(); i++) { + if (!getResponse(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + for (int i = 0; i < response_.size(); i++) { + output.writeMessage(1, response_.get(i)); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + for (int i = 0; i < response_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, response_.get(i)); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse other = (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse) obj; + + boolean result = true; + result = result && getResponseList() + .equals(other.getResponseList()); + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (getResponseCount() > 0) { + hash = (37 * hash) + 
RESPONSE_FIELD_NUMBER; + hash = (53 * hash) + getResponseList().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.internal_static_MultiResponse_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getResponseFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (responseBuilder_ == null) { + response_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + } else { + responseBuilder_.clear(); + } + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse build() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse result = new org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse(this); + int from_bitField0_ = bitField0_; + if (responseBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + response_ = java.util.Collections.unmodifiableList(response_); + bitField0_ = (bitField0_ & ~0x00000001); + } + result.response_ = response_; + } else { + result.response_ = responseBuilder_.build(); + } + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance()) return this; + if (responseBuilder_ == null) { + if (!other.response_.isEmpty()) { + if (response_.isEmpty()) { + response_ = other.response_; + bitField0_ = (bitField0_ & ~0x00000001); + } else { + ensureResponseIsMutable(); + response_.addAll(other.response_); + } + onChanged(); + } + } else { + if (!other.response_.isEmpty()) { + if (responseBuilder_.isEmpty()) { + responseBuilder_.dispose(); + responseBuilder_ = null; + response_ = other.response_; + bitField0_ = (bitField0_ & ~0x00000001); + responseBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getResponseFieldBuilder() : null; + } else { + responseBuilder_.addAllMessages(other.response_); + } + } + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + for (int i = 0; i < getResponseCount(); i++) { + if (!getResponse(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addResponse(subBuilder.buildPartial()); + break; + } + } + } + } + + private int bitField0_; + + // repeated .Parameter response = 1; + private java.util.List response_ = + java.util.Collections.emptyList(); + private void ensureResponseIsMutable() { + if (!((bitField0_ & 0x00000001) == 0x00000001)) { + response_ = new java.util.ArrayList(response_); + bitField0_ |= 0x00000001; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> responseBuilder_; + + public java.util.List getResponseList() { + if (responseBuilder_ == null) { + return java.util.Collections.unmodifiableList(response_); + } else { + return responseBuilder_.getMessageList(); + } + } + public int getResponseCount() { + if (responseBuilder_ == null) { + return response_.size(); + } else { + return responseBuilder_.getCount(); + } + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter getResponse(int index) { + if (responseBuilder_ == null) { + return response_.get(index); + } else { + return responseBuilder_.getMessage(index); + } + } + public Builder setResponse( + int index, 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (responseBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResponseIsMutable(); + response_.set(index, value); + onChanged(); + } else { + responseBuilder_.setMessage(index, value); + } + return this; + } + public Builder setResponse( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (responseBuilder_ == null) { + ensureResponseIsMutable(); + response_.set(index, builderForValue.build()); + onChanged(); + } else { + responseBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addResponse(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (responseBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResponseIsMutable(); + response_.add(value); + onChanged(); + } else { + responseBuilder_.addMessage(value); + } + return this; + } + public Builder addResponse( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter value) { + if (responseBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureResponseIsMutable(); + response_.add(index, value); + onChanged(); + } else { + responseBuilder_.addMessage(index, value); + } + return this; + } + public Builder addResponse( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (responseBuilder_ == null) { + ensureResponseIsMutable(); + response_.add(builderForValue.build()); + onChanged(); + } else { + responseBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addResponse( + int index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder builderForValue) { + if (responseBuilder_ == null) { + ensureResponseIsMutable(); + response_.add(index, builderForValue.build()); + onChanged(); + } else { + responseBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllResponse( + java.lang.Iterable values) { + if (responseBuilder_ == null) { + ensureResponseIsMutable(); + super.addAll(values, response_); + onChanged(); + } else { + responseBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearResponse() { + if (responseBuilder_ == null) { + response_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000001); + onChanged(); + } else { + responseBuilder_.clear(); + } + return this; + } + public Builder removeResponse(int index) { + if (responseBuilder_ == null) { + ensureResponseIsMutable(); + response_.remove(index); + onChanged(); + } else { + responseBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder getResponseBuilder( + int index) { + return getResponseFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder getResponseOrBuilder( + int index) { + if (responseBuilder_ == null) { + return response_.get(index); } else { + return responseBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getResponseOrBuilderList() { + if (responseBuilder_ != null) { + return responseBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(response_); + } + } + public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addResponseBuilder() { + return getResponseFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder addResponseBuilder( + int index) { + return getResponseFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.getDefaultInstance()); + } + public java.util.List + getResponseBuilderList() { + return getResponseFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder> + getResponseFieldBuilder() { + if (responseBuilder_ == null) { + responseBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder, org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ParameterOrBuilder>( + response_, + ((bitField0_ & 0x00000001) == 0x00000001), + getParentForChildren(), + isClean()); + response_ = null; + } + return responseBuilder_; + } + + // @@protoc_insertion_point(builder_scope:MultiResponse) + } + + static { + defaultInstance = new MultiResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:MultiResponse) + } + + public static abstract class RegionClientService + implements com.google.protobuf.Service { + protected RegionClientService() {} + + public interface Interface { + public abstract void get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done); + + } + + 
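As a rough sketch of how the generated blocking stub defined below might be driven once HBase supplies a protobuf-aware RPC channel (the channel and controller here are hypothetical placeholders, not part of this patch; only builder and stub methods generated in this file are assumed):

    import com.google.protobuf.BlockingRpcChannel;
    import com.google.protobuf.RpcController;
    import com.google.protobuf.ServiceException;
    import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest;
    import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse;
    import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.RegionClientService;

    public class RegionClientSketch {
      // Hypothetical caller: channel and controller would come from whatever
      // PB-aware RPC plumbing HBase later wires up around these generated classes.
      public static int callMulti(BlockingRpcChannel channel, RpcController controller)
          throws ServiceException {
        // An empty atomic MultiRequest; a real caller would addRequest(...) one
        // Parameter per action, with that Parameter's own fields populated.
        MultiRequest request = MultiRequest.newBuilder()
            .setAtomic(true)
            .build();
        // The generated blocking stub marshals the call onto the channel and
        // returns the MultiResponse declared for the multi method.
        RegionClientService.BlockingInterface stub =
            RegionClientService.newBlockingStub(channel);
        MultiResponse response = stub.multi(controller, request);
        return response.getResponseCount();
      }
    }

The same pattern applies to the other seven methods of the service; on the server side an implementation of the RegionClientService.Interface above would be wrapped with newReflectiveService (or, for a BlockingInterface implementation, newReflectiveBlockingService) before being handed to the RPC layer.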
public static com.google.protobuf.Service newReflectiveService( + final Interface impl) { + return new RegionClientService() { + @java.lang.Override + public void get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done) { + impl.get(controller, request, done); + } + + @java.lang.Override + public void mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done) { + impl.mutate(controller, request, done); + } + + @java.lang.Override + public void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done) { + impl.scan(controller, request, done); + } + + @java.lang.Override + public void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done) { + impl.lockRow(controller, request, done); + } + + @java.lang.Override + public void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done) { + impl.unlockRow(controller, request, done); + } + + @java.lang.Override + public void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done) { + impl.bulkLoadHFile(controller, request, done); + } + + @java.lang.Override + public void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done) { + impl.execCoprocessor(controller, request, done); + } + + @java.lang.Override + public void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done) { + impl.multi(controller, request, done); + } + + }; + } + + public static com.google.protobuf.BlockingService + newReflectiveBlockingService(final BlockingInterface impl) { + return new com.google.protobuf.BlockingService() { + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final com.google.protobuf.Message callBlockingMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request) + throws com.google.protobuf.ServiceException { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callBlockingMethod() given method descriptor for " + + "wrong service type."); + } + switch(method.getIndex()) { + case 0: + return impl.get(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)request); + case 1: + return impl.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)request); + case 2: + return impl.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)request); + case 3: + return impl.lockRow(controller, 
(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)request); + case 4: + return impl.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)request); + case 5: + return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)request); + case 6: + return impl.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)request); + case 7: + return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)request); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + }; + } + + public abstract void get( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done); + + public abstract void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done); + + public static final + com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.getDescriptor().getServices().get(0); + } + public final com.google.protobuf.Descriptors.ServiceDescriptor + getDescriptorForType() { + return getDescriptor(); + } + + public final void callMethod( + com.google.protobuf.Descriptors.MethodDescriptor method, + com.google.protobuf.RpcController controller, + com.google.protobuf.Message request, + com.google.protobuf.RpcCallback< + com.google.protobuf.Message> done) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.callMethod() given method descriptor for wrong " + + "service type."); + } + switch(method.getIndex()) { + case 0: + this.get(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 1: + this.mutate(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 2: + this.scan(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 3: + this.lockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 4: + this.unlockRow(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 5: + this.bulkLoadHFile(controller, 
(org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 6: + this.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + case 7: + this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest)request, + com.google.protobuf.RpcUtil.specializeCallback( + done)); + return; + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getRequestPrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getRequestPrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public final com.google.protobuf.Message + getResponsePrototype( + com.google.protobuf.Descriptors.MethodDescriptor method) { + if (method.getService() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "Service.getResponsePrototype() given method " + + "descriptor for wrong service type."); + } + switch(method.getIndex()) { + case 0: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(); + case 1: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(); + case 2: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(); + case 3: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(); + case 4: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(); + case 5: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(); + case 6: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(); + case 7: + return org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(); + default: + throw new java.lang.AssertionError("Can't get here."); + } + } + + public static Stub newStub( + com.google.protobuf.RpcChannel channel) { + return new Stub(channel); + } + + public static final class Stub extends 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.RegionClientService implements Interface { + private Stub(com.google.protobuf.RpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.RpcChannel channel; + + public com.google.protobuf.RpcChannel getChannel() { + return channel; + } + + public void get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance())); + } + + public void mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance())); + } + + public void scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance())); + } + + public void lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance())); + } + + public void unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance())); + } + + public void bulkLoadHFile( + 
com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance())); + } + + public void execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance())); + } + + public void multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request, + com.google.protobuf.RpcCallback done) { + channel.callMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance(), + com.google.protobuf.RpcUtil.generalizeCallback( + done, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance())); + } + } + + public static BlockingInterface newBlockingStub( + com.google.protobuf.BlockingRpcChannel channel) { + return new BlockingStub(channel); + } + + public interface BlockingInterface { + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse unlockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request) + throws com.google.protobuf.ServiceException; + + public 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request) + throws com.google.protobuf.ServiceException; + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request) + throws com.google.protobuf.ServiceException; + } + + private static final class BlockingStub implements BlockingInterface { + private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { + this.channel = channel; + } + + private final com.google.protobuf.BlockingRpcChannel channel; + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse get( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(0), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse mutate( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(1), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse scan( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(2), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse lockRow( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(3), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse unlockRow( + com.google.protobuf.RpcController controller, + 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(4), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse bulkLoadHFile( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(5), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse execCoprocessor( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(6), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.getDefaultInstance()); + } + + + public org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse multi( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest request) + throws com.google.protobuf.ServiceException { + return (org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse) channel.callBlockingMethod( + getDescriptor().getMethods().get(7), + controller, + request, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.getDefaultInstance()); + } + + } + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Column_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Column_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Attribute_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Attribute_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Get_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Get_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Result_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Result_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_GetResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_GetResponse_fieldAccessorTable; + private static 
com.google.protobuf.Descriptors.Descriptor + internal_static_Condition_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Condition_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Mutate_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Mutate_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Mutate_ColumnValue_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Mutate_ColumnValue_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Mutate_ColumnValue_QualifierValue_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MutateRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MutateRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MutateResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MutateResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Scan_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Scan_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ScanRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ScanRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ScanResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ScanResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_LockRowRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_LockRowRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_LockRowResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_LockRowResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UnlockRowRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UnlockRowRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UnlockRowResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UnlockRowResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BulkLoadHFileRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BulkLoadHFileRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_BulkLoadHFileRequest_FamilyPath_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable; + private static 
com.google.protobuf.Descriptors.Descriptor + internal_static_BulkLoadHFileResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_BulkLoadHFileResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Parameter_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Parameter_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Property_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Property_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_Exec_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_Exec_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ExecCoprocessorRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ExecCoprocessorRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_ExecCoprocessorResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_ExecCoprocessorResponse_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiRequest_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiRequest_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_MultiResponse_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_MultiResponse_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\022RegionClient.proto\032\013hbase.proto\"+\n\006Col" + + "umn\022\016\n\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 \003(\014\"(" + + "\n\tAttribute\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014" + + "\"\310\001\n\003Get\022\013\n\003row\030\001 \002(\014\022\027\n\006column\030\002 \003(\0132\007." 
+ + "Column\022\035\n\tattribute\030\003 \003(\0132\n.Attribute\022\016\n" + + "\006lockId\030\004 \001(\004\022\032\n\006filter\030\005 \001(\0132\n.Paramete" + + "r\022\035\n\ttimeRange\030\006 \001(\0132\n.TimeRange\022\026\n\013maxV" + + "ersions\030\007 \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004t" + + "rue\"\"\n\006Result\022\030\n\005value\030\001 \003(\0132\t.KeyValue\"" + + "r\n\nGetRequest\022 \n\006region\030\001 \002(\0132\020.RegionSp", + "ecifier\022\021\n\003get\030\002 \002(\0132\004.Get\022\030\n\020closestRow" + + "Before\030\003 \001(\010\022\025\n\rexistenceOnly\030\004 \001(\010\"6\n\013G" + + "etResponse\022\027\n\006result\030\001 \001(\0132\007.Result\022\016\n\006e" + + "xists\030\002 \001(\010\"\355\003\n\tCondition\022\013\n\003row\030\001 \002(\014\022\016" + + "\n\006family\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022+\n\013com" + + "pareType\030\004 \002(\0162\026.Condition.CompareType\022)" + + "\n\ncomparator\030\005 \002(\0162\025.Condition.Comparato" + + "r\022\r\n\005value\030\006 \001(\014\"r\n\013CompareType\022\010\n\004LESS\020" + + "\000\022\021\n\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_E" + + "QUAL\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020", + "\005\022\t\n\005NO_OP\020\006\"\324\001\n\nComparator\022\025\n\021BINARY_CO" + + "MPARATOR\020\000\022\034\n\030BINARY_PREFIX_COMPARATOR\020\001" + + "\022\026\n\022BIT_AND_COMPARATOR\020\002\022\025\n\021BIT_OR_COMPA" + + "RATOR\020\003\022\026\n\022BIT_XOR_COMPARATOR\020\004\022\023\n\017NULL_" + + "COMPARATOR\020\005\022\033\n\027REGEX_STRING_COMPARATOR\020" + + "\006\022\030\n\024SUBSTRING_COMPARATOR\020\007\"\374\003\n\006Mutate\022\013" + + "\n\003row\030\001 \002(\014\022&\n\nmutateType\030\002 \002(\0162\022.Mutate" + + ".MutateType\022(\n\013columnValue\030\003 \003(\0132\023.Mutat" + + "e.ColumnValue\022\035\n\tattribute\030\004 \003(\0132\n.Attri" + + "bute\022\021\n\ttimestamp\030\005 \001(\004\022\016\n\006lockId\030\006 \001(\004\022", + "\030\n\nwriteToWAL\030\007 \001(\010:\004true\022\035\n\ttimeRange\030\n" + + " \001(\0132\n.TimeRange\032\263\001\n\013ColumnValue\022\016\n\006fami" + + "ly\030\001 \002(\014\022:\n\016qualifierValue\030\002 \003(\0132\".Mutat" + + "e.ColumnValue.QualifierValue\022\021\n\ttimestam" + + "p\030\003 \001(\004\032E\n\016QualifierValue\022\021\n\tqualifier\030\001" + + " \002(\014\022\r\n\005value\030\002 \001(\014\022\021\n\ttimestamp\030\003 \001(\004\"b" + + "\n\nMutateType\022\n\n\006APPEND\020\000\022\r\n\tINCREMENT\020\001\022" + + "\007\n\003PUT\020\002\022\n\n\006DELETE\020\003\022\021\n\rDELETE_COLUMN\020\004\022" + + "\021\n\rDELETE_FAMILY\020\005\"i\n\rMutateRequest\022 \n\006r" + + "egion\030\001 \002(\0132\020.RegionSpecifier\022\027\n\006mutate\030", + "\002 \002(\0132\007.Mutate\022\035\n\tcondition\030\003 \001(\0132\n.Cond" + + "ition\"<\n\016MutateResponse\022\027\n\006result\030\001 \001(\0132" + + "\007.Result\022\021\n\tprocessed\030\002 \001(\010\"\367\001\n\004Scan\022\027\n\006" + + "column\030\001 \003(\0132\007.Column\022\035\n\tattribute\030\002 \003(\013" + + "2\n.Attribute\022\020\n\010startRow\030\003 \001(\014\022\017\n\007stopRo" + + "w\030\004 \001(\014\022\032\n\006filter\030\005 \001(\0132\n.Parameter\022\035\n\tt" + + "imeRange\030\006 \001(\0132\n.TimeRange\022\026\n\013maxVersion" + + "s\030\007 \001(\r:\0011\022\031\n\013cacheBlocks\030\010 \001(\010:\004true\022\023\n" + + "\013rowsToCache\030\t \001(\r\022\021\n\tbatchSize\030\n 
\001(\r\"a\n" + + "\013ScanRequest\022\021\n\tscannerId\030\001 \001(\004\022\023\n\004scan\030", + "\002 \001(\0132\005.Scan\022\024\n\014numberOfRows\030\003 \001(\r\022\024\n\014cl" + + "oseScanner\030\004 \001(\010\"\\\n\014ScanResponse\022\027\n\006resu" + + "lt\030\001 \003(\0132\007.Result\022\021\n\tscannerId\030\002 \001(\004\022\023\n\013" + + "moreResults\030\003 \001(\010\022\013\n\003ttl\030\004 \001(\r\"?\n\016LockRo" + + "wRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + + "er\022\013\n\003row\030\002 \003(\014\".\n\017LockRowResponse\022\016\n\006lo" + + "ckId\030\001 \002(\004\022\013\n\003ttl\030\002 \001(\r\"D\n\020UnlockRowRequ" + + "est\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\016\n" + + "\006lockId\030\002 \002(\004\"\023\n\021UnlockRowResponse\"\232\001\n\024B" + + "ulkLoadHFileRequest\022 \n\006region\030\001 \002(\0132\020.Re", + "gionSpecifier\0224\n\nfamilyPath\030\002 \003(\0132 .Bulk" + + "LoadHFileRequest.FamilyPath\032*\n\nFamilyPat" + + "h\022\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025BulkL" + + "oadHFileResponse\022\016\n\006loaded\030\001 \002(\010\".\n\tPara" + + "meter\022\014\n\004type\030\001 \002(\t\022\023\n\013binaryValue\030\002 \001(\014" + + "\"\'\n\010Property\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(" + + "\t\"y\n\004Exec\022\013\n\003row\030\001 \002(\014\022\024\n\014protocolName\030\002" + + " \002(\t\022\022\n\nmethodName\030\003 \002(\t\022\033\n\010property\030\004 \003" + + "(\0132\t.Property\022\035\n\tparameter\030\005 \003(\0132\n.Param" + + "eter\"O\n\026ExecCoprocessorRequest\022 \n\006region", + "\030\001 \002(\0132\020.RegionSpecifier\022\023\n\004call\030\002 \002(\0132\005" + + ".Exec\"H\n\027ExecCoprocessorResponse\022\022\n\nregi" + + "onName\030\001 \002(\014\022\031\n\005value\030\002 \002(\0132\n.Parameter\"" + + ";\n\014MultiRequest\022\033\n\007request\030\001 \003(\0132\n.Param" + + "eter\022\016\n\006atomic\030\002 \001(\010\"-\n\rMultiResponse\022\034\n" + + "\010response\030\001 \003(\0132\n.Parameter2\227\003\n\023RegionCl" + + "ientService\022 \n\003get\022\013.GetRequest\032\014.GetRes" + + "ponse\022)\n\006mutate\022\016.MutateRequest\032\017.Mutate" + + "Response\022#\n\004scan\022\014.ScanRequest\032\r.ScanRes" + + "ponse\022,\n\007lockRow\022\017.LockRowRequest\032\020.Lock", + "RowResponse\0222\n\tunlockRow\022\021.UnlockRowRequ" + + "est\032\022.UnlockRowResponse\022>\n\rbulkLoadHFile" + + "\022\025.BulkLoadHFileRequest\032\026.BulkLoadHFileR" + + "esponse\022D\n\017execCoprocessor\022\027.ExecCoproce" + + "ssorRequest\032\030.ExecCoprocessorResponse\022&\n" + + "\005multi\022\r.MultiRequest\032\016.MultiResponseBH\n" + + "*org.apache.hadoop.hbase.protobuf.genera" + + "tedB\022RegionClientProtosH\001\210\001\001\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_Column_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_Column_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Column_descriptor, + new java.lang.String[] { "Family", "Qualifier", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.class, + 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column.Builder.class); + internal_static_Attribute_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_Attribute_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Attribute_descriptor, + new java.lang.String[] { "Name", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Attribute.Builder.class); + internal_static_Get_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_Get_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Get_descriptor, + new java.lang.String[] { "Row", "Column", "Attribute", "LockId", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get.Builder.class); + internal_static_Result_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_Result_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Result_descriptor, + new java.lang.String[] { "Value", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Result.Builder.class); + internal_static_GetRequest_descriptor = + getDescriptor().getMessageTypes().get(4); + internal_static_GetRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetRequest_descriptor, + new java.lang.String[] { "Region", "Get", "ClosestRowBefore", "ExistenceOnly", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest.Builder.class); + internal_static_GetResponse_descriptor = + getDescriptor().getMessageTypes().get(5); + internal_static_GetResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_GetResponse_descriptor, + new java.lang.String[] { "Result", "Exists", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse.Builder.class); + internal_static_Condition_descriptor = + getDescriptor().getMessageTypes().get(6); + internal_static_Condition_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Condition_descriptor, + new java.lang.String[] { "Row", "Family", "Qualifier", "CompareType", "Comparator", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Condition.Builder.class); + internal_static_Mutate_descriptor = + getDescriptor().getMessageTypes().get(7); + internal_static_Mutate_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Mutate_descriptor, + new java.lang.String[] { "Row", "MutateType", "ColumnValue", "Attribute", "Timestamp", "LockId", "WriteToWAL", "TimeRange", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.Builder.class); + internal_static_Mutate_ColumnValue_descriptor = + 
internal_static_Mutate_descriptor.getNestedTypes().get(0); + internal_static_Mutate_ColumnValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Mutate_ColumnValue_descriptor, + new java.lang.String[] { "Family", "QualifierValue", "Timestamp", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.Builder.class); + internal_static_Mutate_ColumnValue_QualifierValue_descriptor = + internal_static_Mutate_ColumnValue_descriptor.getNestedTypes().get(0); + internal_static_Mutate_ColumnValue_QualifierValue_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Mutate_ColumnValue_QualifierValue_descriptor, + new java.lang.String[] { "Qualifier", "Value", "Timestamp", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Mutate.ColumnValue.QualifierValue.Builder.class); + internal_static_MutateRequest_descriptor = + getDescriptor().getMessageTypes().get(8); + internal_static_MutateRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MutateRequest_descriptor, + new java.lang.String[] { "Region", "Mutate", "Condition", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateRequest.Builder.class); + internal_static_MutateResponse_descriptor = + getDescriptor().getMessageTypes().get(9); + internal_static_MutateResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MutateResponse_descriptor, + new java.lang.String[] { "Result", "Processed", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MutateResponse.Builder.class); + internal_static_Scan_descriptor = + getDescriptor().getMessageTypes().get(10); + internal_static_Scan_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Scan_descriptor, + new java.lang.String[] { "Column", "Attribute", "StartRow", "StopRow", "Filter", "TimeRange", "MaxVersions", "CacheBlocks", "RowsToCache", "BatchSize", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Scan.Builder.class); + internal_static_ScanRequest_descriptor = + getDescriptor().getMessageTypes().get(11); + internal_static_ScanRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ScanRequest_descriptor, + new java.lang.String[] { "ScannerId", "Scan", "NumberOfRows", "CloseScanner", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanRequest.Builder.class); + internal_static_ScanResponse_descriptor = + getDescriptor().getMessageTypes().get(12); + internal_static_ScanResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ScanResponse_descriptor, + new java.lang.String[] { "Result", "ScannerId", "MoreResults", "Ttl", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ScanResponse.Builder.class); + internal_static_LockRowRequest_descriptor = + getDescriptor().getMessageTypes().get(13); + internal_static_LockRowRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_LockRowRequest_descriptor, + new java.lang.String[] { "Region", "Row", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowRequest.Builder.class); + internal_static_LockRowResponse_descriptor = + getDescriptor().getMessageTypes().get(14); + internal_static_LockRowResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_LockRowResponse_descriptor, + new java.lang.String[] { "LockId", "Ttl", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.LockRowResponse.Builder.class); + internal_static_UnlockRowRequest_descriptor = + getDescriptor().getMessageTypes().get(15); + internal_static_UnlockRowRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UnlockRowRequest_descriptor, + new java.lang.String[] { "Region", "LockId", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowRequest.Builder.class); + internal_static_UnlockRowResponse_descriptor = + getDescriptor().getMessageTypes().get(16); + internal_static_UnlockRowResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UnlockRowResponse_descriptor, + new java.lang.String[] { }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.UnlockRowResponse.Builder.class); + internal_static_BulkLoadHFileRequest_descriptor = + getDescriptor().getMessageTypes().get(17); + internal_static_BulkLoadHFileRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BulkLoadHFileRequest_descriptor, + new java.lang.String[] { "Region", "FamilyPath", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.Builder.class); + internal_static_BulkLoadHFileRequest_FamilyPath_descriptor = + internal_static_BulkLoadHFileRequest_descriptor.getNestedTypes().get(0); + internal_static_BulkLoadHFileRequest_FamilyPath_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BulkLoadHFileRequest_FamilyPath_descriptor, + new java.lang.String[] { "Family", "Path", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileRequest.FamilyPath.Builder.class); + internal_static_BulkLoadHFileResponse_descriptor = + getDescriptor().getMessageTypes().get(18); + internal_static_BulkLoadHFileResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_BulkLoadHFileResponse_descriptor, + new java.lang.String[] { "Loaded", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.class, + 
org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.BulkLoadHFileResponse.Builder.class); + internal_static_Parameter_descriptor = + getDescriptor().getMessageTypes().get(19); + internal_static_Parameter_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Parameter_descriptor, + new java.lang.String[] { "Type", "BinaryValue", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Parameter.Builder.class); + internal_static_Property_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_Property_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Property_descriptor, + new java.lang.String[] { "Name", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Property.Builder.class); + internal_static_Exec_descriptor = + getDescriptor().getMessageTypes().get(21); + internal_static_Exec_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_Exec_descriptor, + new java.lang.String[] { "Row", "ProtocolName", "MethodName", "Property", "Parameter", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Exec.Builder.class); + internal_static_ExecCoprocessorRequest_descriptor = + getDescriptor().getMessageTypes().get(22); + internal_static_ExecCoprocessorRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ExecCoprocessorRequest_descriptor, + new java.lang.String[] { "Region", "Call", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorRequest.Builder.class); + internal_static_ExecCoprocessorResponse_descriptor = + getDescriptor().getMessageTypes().get(23); + internal_static_ExecCoprocessorResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_ExecCoprocessorResponse_descriptor, + new java.lang.String[] { "RegionName", "Value", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.ExecCoprocessorResponse.Builder.class); + internal_static_MultiRequest_descriptor = + getDescriptor().getMessageTypes().get(24); + internal_static_MultiRequest_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiRequest_descriptor, + new java.lang.String[] { "Request", "Atomic", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiRequest.Builder.class); + internal_static_MultiResponse_descriptor = + getDescriptor().getMessageTypes().get(25); + internal_static_MultiResponse_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_MultiResponse_descriptor, + new java.lang.String[] { "Response", }, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.class, + org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.MultiResponse.Builder.class); + return null; + } + }; + 
com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/src/main/java/org/apache/hadoop/hbase/protobuf/package.html b/src/main/java/org/apache/hadoop/hbase/protobuf/package.html new file mode 100644 index 00000000000..292cac55ef2 --- /dev/null +++ b/src/main/java/org/apache/hadoop/hbase/protobuf/package.html @@ -0,0 +1,30 @@ + + + + + + + +Holds classes generated from protobuf +src/main/protobuf definition files. + +

See under src/main/protobuf for instructions on how to generate the content under
+the generated subpackage.
+

+
+
diff --git a/src/main/protobuf/README.txt b/src/main/protobuf/README.txt
new file mode 100644
index 00000000000..f979619ed65
--- /dev/null
+++ b/src/main/protobuf/README.txt
@@ -0,0 +1,27 @@
+These are the protobuf definition files used by HBase. The produced Java
+classes are generated into src/main/java/org/apache/hadoop/hbase/protobuf/generated
+and then checked in. The reasoning is that they change infrequently.
+
+To regenerate the classes after making definition file changes, first ensure that
+the protobuf protoc tool is in your $PATH (you may need to download it and build
+it first; it's part of the protobuf package obtainable from here:
+http://code.google.com/p/protobuf/downloads/list). Then run the following (you
+should be able to just copy and paste the below into a terminal and hit return
+-- the protoc compiler runs fast):
+
+  UNIX_PROTO_DIR=src/main/protobuf
+  JAVA_DIR=src/main/java/
+  mkdir -p $JAVA_DIR 2> /dev/null
+  if which cygpath 2> /dev/null; then
+    PROTO_DIR=`cygpath --windows $UNIX_PROTO_DIR`
+    JAVA_DIR=`cygpath --windows $JAVA_DIR`
+  else
+    PROTO_DIR=$UNIX_PROTO_DIR
+  fi
+  for PROTO_FILE in $UNIX_PROTO_DIR/*.proto
+  do
+    protoc -I$PROTO_DIR --java_out=$JAVA_DIR $PROTO_FILE
+  done
+
+After you've done the above, check in both your definition file changes and
+the generated files (or post a patch on a JIRA that includes both).
diff --git a/src/main/protobuf/RegionAdmin.proto b/src/main/protobuf/RegionAdmin.proto
new file mode 100644
index 00000000000..c64d68b9441
--- /dev/null
+++ b/src/main/protobuf/RegionAdmin.proto
@@ -0,0 +1,236 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are used for the RegionAdmin service.
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "RegionAdminProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+import "hbase.proto";
+
+message GetRegionInfoRequest {
+  required RegionSpecifier region = 1;
+}
+
+message GetRegionInfoResponse {
+  required RegionInfo regionInfo = 1;
+}
+
+/**
+ * Get a list of store files for a set of column families in a particular region.
+ * If no column family is specified, get the store files for all column families.
+ */ +message GetStoreFileListRequest { + required RegionSpecifier region = 1; + repeated bytes columnFamily = 2; +} + +message GetStoreFileListResponse { + repeated string storeFile = 1; +} + +message GetOnlineRegionRequest { +} + +message GetOnlineRegionResponse { + repeated RegionInfo regionInfo = 1; +} + +message OpenRegionRequest { + repeated RegionSpecifier region = 1; + optional uint32 versionOfOfflineNode = 2; +} + +message OpenRegionResponse { + repeated RegionOpeningState openingState = 1; + + enum RegionOpeningState { + OPENED = 0; + ALREADY_OPENED = 1; + FAILED_OPENING = 2; + } +} + +/** + * Closes the specified region and will use or not use ZK during the close + * according to the specified flag. + */ +message CloseRegionRequest { + required RegionSpecifier region = 1; + optional uint32 versionOfClosingNode = 2; + optional bool transitionInZK = 3 [default = true]; +} + +message CloseRegionResponse { + required bool closed = 1; +} + +/** + * Flushes the MemStore of the specified region. + *

+ * This method is synchronous. + */ +message FlushRegionRequest { + required RegionSpecifier region = 1; + optional uint64 ifOlderThanTs = 2; +} + +message FlushRegionResponse { + required uint64 lastFlushTime = 1; + optional bool flushed = 2; +} + +/** + * Splits the specified region. + *

+ * This method currently flushes the region and then forces a compaction which + * will then trigger a split. The flush is done synchronously but the + * compaction is asynchronous. + */ +message SplitRegionRequest { + required RegionSpecifier region = 1; + optional bytes splitPoint = 2; +} + +message SplitRegionResponse { +} + +/** + * Compacts the specified region. Performs a major compaction if specified. + *

+ * This method is asynchronous. + */ +message CompactRegionRequest { + required RegionSpecifier region = 1; + optional bool major = 2; +} + +message CompactRegionResponse { +} + +message UUID { + required uint64 leastSigBits = 1; + required uint64 mostSigBits = 2; +} + +// Protocol buffer version of HLog +message WALEntry { + required WALKey walKey = 1; + required WALEdit edit = 2; + + // Protocol buffer version of HLogKey + message WALKey { + required bytes encodedRegionName = 1; + required bytes tableName = 2; + required uint64 logSequenceNumber = 3; + required uint64 writeTime = 4; + optional UUID clusterId = 5; + } + + message WALEdit { + repeated KeyValue keyValue = 1; + repeated FamilyScope familyScope = 2; + + enum ScopeType { + REPLICATION_SCOPE_LOCAL = 0; + REPLICATION_SCOPE_GLOBAL = 1; + } + + message FamilyScope { + required bytes family = 1; + required ScopeType scopeType = 2; + } + } +} + +/** + * Replicates the given entries. The guarantee is that the given entries + * will be durable on the slave cluster if this method returns without + * any exception. + * hbase.replication has to be set to true for this to work. + */ +message ReplicateWALEntryRequest { + repeated WALEntry walEntry = 1; +} + +message ReplicateWALEntryResponse { +} + +// Replacement for rollHLogWriter in HRegionInterface +message RollWALWriterRequest { +} + +message RollWALWriterResponse { + // A list of encoded name of regions to flush + repeated bytes regionToFlush = 1; +} + +message StopServerRequest { + required string reason = 1; +} + +message StopServerResponse { +} + +message GetServerInfoRequest { +} + +message GetServerInfoResponse { + required ServerName serverName = 1; +} + +service RegionAdminService { + rpc getRegionInfo(GetRegionInfoRequest) + returns(GetRegionInfoResponse); + + rpc getStoreFileList(GetStoreFileListRequest) + returns(GetStoreFileListResponse); + + rpc getOnlineRegion(GetOnlineRegionRequest) + returns(GetOnlineRegionResponse); + + rpc openRegion(OpenRegionRequest) + returns(OpenRegionResponse); + + rpc closeRegion(CloseRegionRequest) + returns(CloseRegionResponse); + + rpc flushRegion(FlushRegionRequest) + returns(FlushRegionResponse); + + rpc splitRegion(SplitRegionRequest) + returns(SplitRegionResponse); + + rpc compactRegion(CompactRegionRequest) + returns(CompactRegionResponse); + + rpc replicateWALEntry(ReplicateWALEntryRequest) + returns(ReplicateWALEntryResponse); + + rpc rollWALWriter(RollWALWriterRequest) + returns(RollWALWriterResponse); + + rpc getServerInfo(GetServerInfoRequest) + returns(GetServerInfoResponse); + + rpc stopServer(StopServerRequest) + returns(StopServerResponse); +} diff --git a/src/main/protobuf/RegionClient.proto b/src/main/protobuf/RegionClient.proto new file mode 100644 index 00000000000..358382bda74 --- /dev/null +++ b/src/main/protobuf/RegionClient.proto @@ -0,0 +1,372 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This file contains protocol buffers that are used for RegionClient service. + +option java_package = "org.apache.hadoop.hbase.protobuf.generated"; +option java_outer_classname = "RegionClientProtos"; +option java_generic_services = true; +option java_generate_equals_and_hash = true; +option optimize_for = SPEED; + +import "hbase.proto"; + +/** + * Container for a list of column qualifier names of a family. + */ +message Column { + required bytes family = 1; + repeated bytes qualifier = 2; +} + +message Attribute { + required string name = 1; + optional bytes value = 2; +} + +/** + * The protocol buffer version of Get + */ +message Get { + required bytes row = 1; + repeated Column column = 2; + repeated Attribute attribute = 3; + optional uint64 lockId = 4; + optional Parameter filter = 5; + optional TimeRange timeRange = 6; + optional uint32 maxVersions = 7 [default = 1]; + optional bool cacheBlocks = 8 [default = true]; +} + +message Result { + repeated KeyValue value = 1; +} + +/** + * The get request. Perform a single Get operation. + * Unless existenceOnly is specified, return all the requested data + * for the row that matches exactly, or the one that immediately + * precedes it if closestRowBefore is specified. + * + * If existenceOnly is set, only the existence will be returned. + */ +message GetRequest { + required RegionSpecifier region = 1; + required Get get = 2; + + // If the row to get doesn't exist, return the + // closest row before. + optional bool closestRowBefore = 3; + + // The result isn't asked for, just check for + // the existence. If specified, closestRowBefore + // will be ignored + optional bool existenceOnly = 4; +} + +message GetResponse { + optional Result result = 1; + + // used for Get to check existence only + optional bool exists = 2; +} + +/** + * Condition to check if the value of a given cell (row, + * family, qualifier) matches a value via a given comparator. + * The value is optional since some comparator may not require + * a value to compare, for example, checking null. + * + * Condition is used in check and mutate operations. + */ +message Condition { + required bytes row = 1; + required bytes family = 2; + required bytes qualifier = 3; + required CompareType compareType = 4; + required Comparator comparator = 5; + optional bytes value = 6; + + enum CompareType { + LESS = 0; + LESS_OR_EQUAL = 1; + EQUAL = 2; + NOT_EQUAL = 3; + GREATER_OR_EQUAL = 4; + GREATER = 5; + NO_OP = 6; + } + + enum Comparator { + BINARY_COMPARATOR = 0; + BINARY_PREFIX_COMPARATOR = 1; + BIT_AND_COMPARATOR = 2; + BIT_OR_COMPARATOR = 3; + BIT_XOR_COMPARATOR = 4; + NULL_COMPARATOR = 5; + REGEX_STRING_COMPARATOR = 6; + SUBSTRING_COMPARATOR = 7; + } +} + +/** + * A specific mutate inside a mutate request. + * It can be an append, increment, put or delete based + * on the mutate type. 
+ */
+message Mutate {
+  required bytes row = 1;
+  required MutateType mutateType = 2;
+  repeated ColumnValue columnValue = 3;
+  repeated Attribute attribute = 4;
+  optional uint64 timestamp = 5;
+  optional uint64 lockId = 6;
+  optional bool writeToWAL = 7 [default = true];
+
+  // For some mutate types a result may be returned, in which case a
+  // time range can be specified for a potential performance gain
+  optional TimeRange timeRange = 10;
+
+  enum MutateType {
+    APPEND = 0;
+    INCREMENT = 1;
+    PUT = 2;
+    DELETE = 3;
+    DELETE_COLUMN = 4;
+    DELETE_FAMILY = 5;
+  }
+
+  message ColumnValue {
+    required bytes family = 1;
+    repeated QualifierValue qualifierValue = 2;
+
+    // Default timestamp for qualifier values,
+    // or the timestamp of the column family to be deleted
+    optional uint64 timestamp = 3;
+
+    message QualifierValue {
+      required bytes qualifier = 1;
+      optional bytes value = 2;
+      optional uint64 timestamp = 3;
+    }
+  }
+}
+
+/**
+ * The mutate request. Perform a single Mutate operation.
+ *
+ * Optionally, you can specify a condition. The mutate
+ * will take place only if the condition is met. Otherwise,
+ * the mutate will be ignored. In the response, the
+ * processed parameter indicates whether the mutate
+ * actually happened.
+ */
+message MutateRequest {
+  required RegionSpecifier region = 1;
+  required Mutate mutate = 2;
+  optional Condition condition = 3;
+}
+
+message MutateResponse {
+  optional Result result = 1;
+
+  // used to indicate whether the mutate was processed
+  optional bool processed = 2;
+}
+
+/**
+ * Instead of doing a get on a table, you can scan it with optional filters.
+ * You can specify the row key range, time range, the columns/families
+ * to scan, and so on.
+ *
+ * This Scan is used in the first scan request. The response to the
+ * initial request returns a scanner id, which should be used to
+ * fetch subsequent result batches until the scanner is closed.
+ */
+message Scan {
+  repeated Column column = 1;
+  repeated Attribute attribute = 2;
+  optional bytes startRow = 3;
+  optional bytes stopRow = 4;
+  optional Parameter filter = 5;
+  optional TimeRange timeRange = 6;
+  optional uint32 maxVersions = 7 [default = 1];
+  optional bool cacheBlocks = 8 [default = true];
+  optional uint32 rowsToCache = 9;
+  optional uint32 batchSize = 10;
+}
+
+/**
+ * A scan request. Initially, it should specify a scan. Later on, you
+ * can use the scanner id returned to fetch result batches with a different
+ * scan request.
+ *
+ * The scanner will remain open if there are more results, and it is not
+ * asked to be closed explicitly.
+ *
+ * You can fetch the results and ask the scanner to be closed to save
+ * a trip if you are not interested in the remaining results.
+ */
+message ScanRequest {
+  optional uint64 scannerId = 1;
+  optional Scan scan = 2;
+  optional uint32 numberOfRows = 3;
+  optional bool closeScanner = 4;
+}
+
+/**
+ * The scan response. If there are no more results, moreResults will
+ * be false. If it is not specified, it means there are more.
+ */ +message ScanResponse { + repeated Result result = 1; + optional uint64 scannerId = 2; + optional bool moreResults = 3; + optional uint32 ttl = 4; +} + +message LockRowRequest { + required RegionSpecifier region = 1; + repeated bytes row = 2; +} + +message LockRowResponse { + required uint64 lockId = 1; + optional uint32 ttl = 2; +} + +message UnlockRowRequest { + required RegionSpecifier region = 1; + required uint64 lockId = 2; +} + +message UnlockRowResponse { +} + +/** + * Atomically bulk load multiple HFiles (say from different column families) + * into an open region. + */ +message BulkLoadHFileRequest { + required RegionSpecifier region = 1; + repeated FamilyPath familyPath = 2; + + message FamilyPath { + required bytes family = 1; + required string path = 2; + } +} + +message BulkLoadHFileResponse { + required bool loaded = 1; +} + +message Parameter { + required string type = 1; + optional bytes binaryValue = 2; +} + +message Property { + required string name = 1; + required string value = 2; +} + +/** + * An individual coprocessor call. You must specify the protocol, + * the method, and the row to which the call will be executed. + * + * You can specify the configuration settings in the property list. + * + * The parameter list has the parameters used for the method. + * A parameter is a pair of parameter name and the binary parameter + * value. The name is the parameter class name. The value is the + * binary format of the parameter, for example, protocol buffer + * encoded value. + */ +message Exec { + required bytes row = 1; + required string protocolName = 2; + required string methodName = 3; + repeated Property property = 4; + repeated Parameter parameter = 5; +} + + /** + * Executes a single {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol} + * method using the registered protocol handlers. + * {@link CoprocessorProtocol} implementations must be registered via the + * {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol( + * Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)} + * method before they are available. + */ +message ExecCoprocessorRequest { + required RegionSpecifier region = 1; + required Exec call = 2; +} + +message ExecCoprocessorResponse { + required bytes regionName = 1; + required Parameter value = 2; +} + +/** + * You can execute a list of actions on regions assigned + * to the same region server, if you can't find an individual + * call which meets your requirement. + * + * The multi request can have a list of requests. Each request + * should be a protocol buffer encoded request such as GetRequest, + * MutateRequest, ExecCoprocessorRequest. + * + * If the list contains multiple mutate requests only, atomic can + * be set to make sure they can be processed atomically. 
+ */
+message MultiRequest {
+  repeated Parameter request = 1;
+  optional bool atomic = 2;
+}
+
+message MultiResponse {
+  repeated Parameter response = 1;
+}
+
+service RegionClientService {
+  rpc get(GetRequest)
+    returns(GetResponse);
+
+  rpc mutate(MutateRequest)
+    returns(MutateResponse);
+
+  rpc scan(ScanRequest)
+    returns(ScanResponse);
+
+  rpc lockRow(LockRowRequest)
+    returns(LockRowResponse);
+
+  rpc unlockRow(UnlockRowRequest)
+    returns(UnlockRowResponse);
+
+  rpc bulkLoadHFile(BulkLoadHFileRequest)
+    returns(BulkLoadHFileResponse);
+
+  rpc execCoprocessor(ExecCoprocessorRequest)
+    returns(ExecCoprocessorResponse);
+
+  rpc multi(MultiRequest)
+    returns(MultiResponse);
+}
diff --git a/src/main/protobuf/hbase.proto b/src/main/protobuf/hbase.proto
new file mode 100644
index 00000000000..da7878848b0
--- /dev/null
+++ b/src/main/protobuf/hbase.proto
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This file contains protocol buffers that are shared throughout HBase
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "HBaseProtos";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+
+/**
+ * Protocol buffer version of HRegionInfo.
+ */
+message RegionInfo {
+  required uint64 regionId = 1;
+  required bytes tableName = 2;
+  optional bytes startKey = 3;
+  optional bytes endKey = 4;
+  optional bool offline = 5;
+  optional bool split = 6;
+}
+
+/**
+ * Container protocol buffer to specify a region.
+ * You can specify a region by its region name, or by the hash
+ * of the region name, which is known as the encoded
+ * region name.
+ */
+message RegionSpecifier {
+  required RegionSpecifierType type = 1;
+  required bytes value = 2;
+
+  enum RegionSpecifierType {
+    // <tablename>,<startkey>,<regionId>.<encodedName>
+    REGION_NAME = 1;
+
+    // hash of <tablename>,<startkey>,<regionId>
+    ENCODED_REGION_NAME = 2;
+  }
+}
+
+/**
+ * A range of time. Both from and to are Java timestamps
+ * in milliseconds. If you don't specify a time range,
+ * it means all time; by default, from = 0 and
+ * to = Long.MAX_VALUE.
+ */
+message TimeRange {
+  optional uint64 from = 1;
+  optional uint64 to = 2;
+}
+
+/**
+ * The type of the key in a KeyValue.
+ */
+enum KeyType {
+  MINIMUM = 0;
+  PUT = 4;
+
+  DELETE = 8;
+  DELETE_COLUMN = 12;
+  DELETE_FAMILY = 14;
+
+  // MAXIMUM is used when searching; you look from maximum on down.
+  MAXIMUM = 255;
+}
+
+/**
+ * Protocol buffer version of KeyValue.
+ * It doesn't include the transient parameters of the Java class.
+ */
+message KeyValue {
+  required bytes row = 1;
+  required bytes family = 2;
+  required bytes qualifier = 3;
+  optional uint64 timestamp = 4;
+  optional KeyType keyType = 5;
+  optional bytes value = 6;
+}
+
+/**
+ * Protocol buffer version of ServerName
+ */
+message ServerName {
+  required string hostName = 1;
+  optional uint32 port = 2;
+  optional uint64 startCode = 3;
+}
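
For illustration only: the sketch below shows how a caller might build a GetRequest
from the definitions above and drive the generated RegionClientService blocking stub.
It is a minimal sketch, not the wiring this patch provides: the BlockingRpcChannel is
assumed to come from HBase's RPC layer, the null RpcController is a simplification,
and the "info" family and the GetExample class name are made-up placeholders.

  import com.google.protobuf.BlockingRpcChannel;
  import com.google.protobuf.ByteString;
  import com.google.protobuf.ServiceException;
  import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
  import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos;
  import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Column;
  import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.Get;
  import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetRequest;
  import org.apache.hadoop.hbase.protobuf.generated.RegionClientProtos.GetResponse;

  public class GetExample {
    // 'channel' is assumed to be supplied by the RPC layer; how it is obtained
    // is outside the scope of this patch.
    static GetResponse doGet(BlockingRpcChannel channel, byte[] regionName, byte[] row)
        throws ServiceException {
      // Name the region by its full region name (see RegionSpecifier in hbase.proto).
      RegionSpecifier region = RegionSpecifier.newBuilder()
          .setType(RegionSpecifier.RegionSpecifierType.REGION_NAME)
          .setValue(ByteString.copyFrom(regionName))
          .build();
      // Ask for every qualifier of the (made-up) 'info' family on the given row.
      Get get = Get.newBuilder()
          .setRow(ByteString.copyFrom(row))
          .addColumn(Column.newBuilder()
              .setFamily(ByteString.copyFromUtf8("info"))
              .build())
          .setMaxVersions(1)
          .build();
      GetRequest request = GetRequest.newBuilder()
          .setRegion(region)
          .setGet(get)
          .build();
      // Synchronous call through the generated blocking stub.
      RegionClientProtos.RegionClientService.BlockingInterface stub =
          RegionClientProtos.RegionClientService.newBlockingStub(channel);
      return stub.get(null, request);
    }
  }

The same builder pattern applies to MutateRequest, ScanRequest, and the other
request messages defined in RegionClient.proto.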
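
In the same spirit, a minimal sketch of an admin-side call against RegionAdminService,
assuming the RegionAdminProtos classes generated from RegionAdmin.proto above
(java_generic_services is enabled there), a channel supplied by the RPC layer, and a
placeholder encoded region name; FlushExample is a made-up class name.

  import com.google.protobuf.BlockingRpcChannel;
  import com.google.protobuf.ByteString;
  import com.google.protobuf.ServiceException;
  import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
  import org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos;
  import org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionRequest;
  import org.apache.hadoop.hbase.protobuf.generated.RegionAdminProtos.FlushRegionResponse;

  public class FlushExample {
    static FlushRegionResponse flush(BlockingRpcChannel channel, String encodedName)
        throws ServiceException {
      // A region can also be named by the hash ("encoded") form of its name.
      RegionSpecifier region = RegionSpecifier.newBuilder()
          .setType(RegionSpecifier.RegionSpecifierType.ENCODED_REGION_NAME)
          .setValue(ByteString.copyFromUtf8(encodedName))
          .build();
      FlushRegionRequest request = FlushRegionRequest.newBuilder()
          .setRegion(region)
          .build();
      // flushRegion is synchronous, per the comment on FlushRegionRequest.
      RegionAdminProtos.RegionAdminService.BlockingInterface admin =
          RegionAdminProtos.RegionAdminService.newBlockingStub(channel);
      return admin.flushRegion(null, request);
    }
  }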