+ * required .hbase.pb.Scan scan = 1;
+ */
+ boolean hasScan();
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
+
+ // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ /**
+ * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ */
+ boolean hasDeleteType();
+ /**
+ * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ */
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType();
+
+ // optional uint64 timestamp = 3;
+ /**
+ * optional uint64 timestamp = 3;
+ */
+ boolean hasTimestamp();
+ /**
+ * optional uint64 timestamp = 3;
+ */
+ long getTimestamp();
+
+ // required uint32 rowBatchSize = 4;
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
+ boolean hasRowBatchSize();
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
+ int getRowBatchSize();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BulkDeleteRequest}
+ */
+ public static final class BulkDeleteRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements BulkDeleteRequestOrBuilder {
+ // Use BulkDeleteRequest.newBuilder() to construct.
+ private BulkDeleteRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private BulkDeleteRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final BulkDeleteRequest defaultInstance;
+ public static BulkDeleteRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public BulkDeleteRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private BulkDeleteRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = scan_.toBuilder();
+ }
+ scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(scan_);
+ scan_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ case 16: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(2, rawValue);
+ } else {
+ bitField0_ |= 0x00000002;
+ deleteType_ = value;
+ }
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ timestamp_ = input.readUInt64();
+ break;
+ }
+ case 32: {
+ bitField0_ |= 0x00000008;
+ rowBatchSize_ = input.readUInt32();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<BulkDeleteRequest> PARSER =
+ new com.google.protobuf.AbstractParser<BulkDeleteRequest>() {
+ public BulkDeleteRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new BulkDeleteRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<BulkDeleteRequest> getParserForType() {
+ return PARSER;
+ }
+
+ /**
+ * Protobuf enum {@code hbase.pb.BulkDeleteRequest.DeleteType}
+ */
+ public enum DeleteType
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * ROW = 0;
+ */
+ ROW(0, 0),
+ /**
+ * FAMILY = 1;
+ */
+ FAMILY(1, 1),
+ /**
+ * COLUMN = 2;
+ */
+ COLUMN(2, 2),
+ /**
+ * VERSION = 3;
+ */
+ VERSION(3, 3),
+ ;
+
+ /**
+ * ROW = 0;
+ */
+ public static final int ROW_VALUE = 0;
+ /**
+ * FAMILY = 1;
+ */
+ public static final int FAMILY_VALUE = 1;
+ /**
+ * COLUMN = 2;
+ */
+ public static final int COLUMN_VALUE = 2;
+ /**
+ * VERSION = 3;
+ */
+ public static final int VERSION_VALUE = 3;
+
+
+ public final int getNumber() { return value; }
+
+ public static DeleteType valueOf(int value) {
+ switch (value) {
+ case 0: return ROW;
+ case 1: return FAMILY;
+ case 2: return COLUMN;
+ case 3: return VERSION;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<DeleteType>() {
+ public DeleteType findValueByNumber(int number) {
+ return DeleteType.valueOf(number);
+ }
+ };
+
+ private final int index;
+ private final int value;
+
+ private DeleteType(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:hbase.pb.BulkDeleteRequest.DeleteType)
+ }
+
+ private int bitField0_;
+ // required .hbase.pb.Scan scan = 1;
+ public static final int SCAN_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public boolean hasScan() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+ return scan_;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+ return scan_;
+ }
+
+ // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ public static final int DELETETYPE_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_;
+ /**
+ * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ */
+ public boolean hasDeleteType() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ */
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() {
+ return deleteType_;
+ }
+
+ // optional uint64 timestamp = 3;
+ public static final int TIMESTAMP_FIELD_NUMBER = 3;
+ private long timestamp_;
+ /**
+ * optional uint64 timestamp = 3;
+ */
+ public boolean hasTimestamp() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * optional uint64 timestamp = 3;
+ */
+ public long getTimestamp() {
+ return timestamp_;
+ }
+
+ // required uint32 rowBatchSize = 4;
+ public static final int ROWBATCHSIZE_FIELD_NUMBER = 4;
+ private int rowBatchSize_;
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
+ public boolean hasRowBatchSize() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
+ public int getRowBatchSize() {
+ return rowBatchSize_;
+ }
+
+ private void initFields() {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
+ timestamp_ = 0L;
+ rowBatchSize_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasScan()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasDeleteType()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasRowBatchSize()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getScan().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, scan_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeEnum(2, deleteType_.getNumber());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeUInt64(3, timestamp_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeUInt32(4, rowBatchSize_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, scan_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(2, deleteType_.getNumber());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(3, timestamp_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(4, rowBatchSize_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) obj;
+
+ boolean result = true;
+ result = result && (hasScan() == other.hasScan());
+ if (hasScan()) {
+ result = result && getScan()
+ .equals(other.getScan());
+ }
+ result = result && (hasDeleteType() == other.hasDeleteType());
+ if (hasDeleteType()) {
+ result = result &&
+ (getDeleteType() == other.getDeleteType());
+ }
+ result = result && (hasTimestamp() == other.hasTimestamp());
+ if (hasTimestamp()) {
+ result = result && (getTimestamp()
+ == other.getTimestamp());
+ }
+ result = result && (hasRowBatchSize() == other.hasRowBatchSize());
+ if (hasRowBatchSize()) {
+ result = result && (getRowBatchSize()
+ == other.getRowBatchSize());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasScan()) {
+ hash = (37 * hash) + SCAN_FIELD_NUMBER;
+ hash = (53 * hash) + getScan().hashCode();
+ }
+ if (hasDeleteType()) {
+ hash = (37 * hash) + DELETETYPE_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnum(getDeleteType());
+ }
+ if (hasTimestamp()) {
+ hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getTimestamp());
+ }
+ if (hasRowBatchSize()) {
+ hash = (37 * hash) + ROWBATCHSIZE_FIELD_NUMBER;
+ hash = (53 * hash) + getRowBatchSize();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BulkDeleteRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequestOrBuilder {
+
+ private int bitField0_;
+
+ // required .hbase.pb.Scan scan = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public boolean hasScan() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+ if (scanBuilder_ == null) {
+ return scan_;
+ } else {
+ return scanBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+ if (scanBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ scan_ = value;
+ onChanged();
+ } else {
+ scanBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public Builder setScan(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
+ if (scanBuilder_ == null) {
+ scan_ = builderForValue.build();
+ onChanged();
+ } else {
+ scanBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+ if (scanBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
+ scan_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
+ } else {
+ scan_ = value;
+ }
+ onChanged();
+ } else {
+ scanBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public Builder clearScan() {
+ if (scanBuilder_ == null) {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ onChanged();
+ } else {
+ scanBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getScanFieldBuilder().getBuilder();
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+ if (scanBuilder_ != null) {
+ return scanBuilder_.getMessageOrBuilder();
+ } else {
+ return scan_;
+ }
+ }
+ /**
+ * required .hbase.pb.Scan scan = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
+ getScanFieldBuilder() {
+ if (scanBuilder_ == null) {
+ scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
+ scan_,
+ getParentForChildren(),
+ isClean());
+ scan_ = null;
+ }
+ return scanBuilder_;
+ }
+
+ // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
+ /**
+ * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ */
+ public boolean hasDeleteType() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ */
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() {
+ return deleteType_;
+ }
+ /**
+ * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ */
+ public Builder setDeleteType(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ deleteType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ */
+ public Builder clearDeleteType() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 timestamp = 3;
+ private long timestamp_ ;
+ /**
+ * optional uint64 timestamp = 3;
+ */
+ public boolean hasTimestamp() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * optional uint64 timestamp = 3;
+ */
+ public long getTimestamp() {
+ return timestamp_;
+ }
+ /**
+ * optional uint64 timestamp = 3;
+ */
+ public Builder setTimestamp(long value) {
+ bitField0_ |= 0x00000004;
+ timestamp_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional uint64 timestamp = 3;
+ */
+ public Builder clearTimestamp() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ timestamp_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // required uint32 rowBatchSize = 4;
+ private int rowBatchSize_ ;
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
+ public boolean hasRowBatchSize() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
+ public int getRowBatchSize() {
+ return rowBatchSize_;
+ }
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
+ public Builder setRowBatchSize(int value) {
+ bitField0_ |= 0x00000008;
+ rowBatchSize_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required uint32 rowBatchSize = 4;
+ */
+ public Builder clearRowBatchSize() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ rowBatchSize_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteRequest)
+ }
+
+ static {
+ defaultInstance = new BulkDeleteRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteRequest)
+ }
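
The nested Builder shown above is how callers assemble this message. Below is a minimal usage sketch, not part of the generated file or the patch: `BulkDeleteRequestSketch` is a hypothetical helper class, and the empty Scan, delete type, timestamp, and batch size are arbitrary example values.

import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest;
import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class BulkDeleteRequestSketch {
  public static BulkDeleteRequest buildExampleRequest() {
    // An empty Scan selects every row; a real caller would narrow it down.
    ClientProtos.Scan scan = ClientProtos.Scan.newBuilder().build();
    return BulkDeleteRequest.newBuilder()
        .setScan(scan)                             // required .hbase.pb.Scan scan = 1
        .setDeleteType(DeleteType.ROW)             // required DeleteType deleteType = 2
        .setTimestamp(System.currentTimeMillis())  // optional uint64 timestamp = 3
        .setRowBatchSize(100)                      // required uint32 rowBatchSize = 4
        .build();                                  // fails if a required field is unset
  }
}

Since scan, deleteType, and rowBatchSize are all declared required, isInitialized() on a partially built message returns false, which is the validation path the generated isInitialized() above implements.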
+
+ public interface BulkDeleteResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required uint64 rowsDeleted = 1;
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
+ boolean hasRowsDeleted();
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
+ long getRowsDeleted();
+
+ // optional uint64 versionsDeleted = 2;
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
+ boolean hasVersionsDeleted();
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
+ long getVersionsDeleted();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BulkDeleteResponse}
+ */
+ public static final class BulkDeleteResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements BulkDeleteResponseOrBuilder {
+ // Use BulkDeleteResponse.newBuilder() to construct.
+ private BulkDeleteResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private BulkDeleteResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final BulkDeleteResponse defaultInstance;
+ public static BulkDeleteResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public BulkDeleteResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private BulkDeleteResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ rowsDeleted_ = input.readUInt64();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ versionsDeleted_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<BulkDeleteResponse> PARSER =
+ new com.google.protobuf.AbstractParser<BulkDeleteResponse>() {
+ public BulkDeleteResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new BulkDeleteResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<BulkDeleteResponse> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required uint64 rowsDeleted = 1;
+ public static final int ROWSDELETED_FIELD_NUMBER = 1;
+ private long rowsDeleted_;
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
+ public boolean hasRowsDeleted() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
+ public long getRowsDeleted() {
+ return rowsDeleted_;
+ }
+
+ // optional uint64 versionsDeleted = 2;
+ public static final int VERSIONSDELETED_FIELD_NUMBER = 2;
+ private long versionsDeleted_;
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
+ public boolean hasVersionsDeleted() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
+ public long getVersionsDeleted() {
+ return versionsDeleted_;
+ }
+
+ private void initFields() {
+ rowsDeleted_ = 0L;
+ versionsDeleted_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRowsDeleted()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, rowsDeleted_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt64(2, versionsDeleted_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, rowsDeleted_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(2, versionsDeleted_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) obj;
+
+ boolean result = true;
+ result = result && (hasRowsDeleted() == other.hasRowsDeleted());
+ if (hasRowsDeleted()) {
+ result = result && (getRowsDeleted()
+ == other.getRowsDeleted());
+ }
+ result = result && (hasVersionsDeleted() == other.hasVersionsDeleted());
+ if (hasVersionsDeleted()) {
+ result = result && (getVersionsDeleted()
+ == other.getVersionsDeleted());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRowsDeleted()) {
+ hash = (37 * hash) + ROWSDELETED_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getRowsDeleted());
+ }
+ if (hasVersionsDeleted()) {
+ hash = (37 * hash) + VERSIONSDELETED_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getVersionsDeleted());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BulkDeleteResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponseOrBuilder {
+
+ private int bitField0_;
+
+ // required uint64 rowsDeleted = 1;
+ private long rowsDeleted_ ;
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
+ public boolean hasRowsDeleted() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
+ public long getRowsDeleted() {
+ return rowsDeleted_;
+ }
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
+ public Builder setRowsDeleted(long value) {
+ bitField0_ |= 0x00000001;
+ rowsDeleted_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required uint64 rowsDeleted = 1;
+ */
+ public Builder clearRowsDeleted() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ rowsDeleted_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 versionsDeleted = 2;
+ private long versionsDeleted_ ;
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
+ public boolean hasVersionsDeleted() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
+ public long getVersionsDeleted() {
+ return versionsDeleted_;
+ }
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
+ public Builder setVersionsDeleted(long value) {
+ bitField0_ |= 0x00000002;
+ versionsDeleted_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional uint64 versionsDeleted = 2;
+ */
+ public Builder clearVersionsDeleted() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ versionsDeleted_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteResponse)
+ }
+
+ static {
+ defaultInstance = new BulkDeleteResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteResponse)
+ }
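
On the caller's side the response is typically rehydrated from the bytes returned by the endpoint. A minimal sketch, illustrative only (the helper class and method names are not part of the patch); it uses the generated parseFrom(byte[]) and accessors declared above.

import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse;

public class BulkDeleteResponseSketch {
  public static void printCounts(byte[] serialized)
      throws com.google.protobuf.InvalidProtocolBufferException {
    // parseFrom(byte[]) is one of the generated static parsers shown above.
    BulkDeleteResponse response = BulkDeleteResponse.parseFrom(serialized);
    System.out.println("rows deleted: " + response.getRowsDeleted());           // required uint64 rowsDeleted = 1
    if (response.hasVersionsDeleted()) {                                         // optional uint64 versionsDeleted = 2
      System.out.println("versions deleted: " + response.getVersionsDeleted());
    }
  }
}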
+
+ /**
+ * Protobuf service {@code hbase.pb.BulkDeleteService}
+ */
+ public static abstract class BulkDeleteService
+ implements com.google.protobuf.Service {
+ protected BulkDeleteService() {}
+
+ public interface Interface {
+ /**
+ * rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse);
+ */
+ public abstract void delete(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done);
+
+ }
+
+ /**
+ * rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse);
+ */
+ public abstract void delete(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done);
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteService)
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+ }
+
+ public final class ColumnAggregationProtos {
+ private ColumnAggregationProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface SumRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes family = 1;
+ /**
+ * required bytes family = 1;
+ */
+ boolean hasFamily();
+ /**
+ * required bytes family = 1;
+ */
+ com.google.protobuf.ByteString getFamily();
+
+ // optional bytes qualifier = 2;
+ /**
+ * optional bytes qualifier = 2;
+ */
+ boolean hasQualifier();
+ /**
+ * optional bytes qualifier = 2;
+ */
+ com.google.protobuf.ByteString getQualifier();
+ }
+ /**
+ * Protobuf type {@code SumRequest}
+ */
+ public static final class SumRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements SumRequestOrBuilder {
+ // Use SumRequest.newBuilder() to construct.
+ private SumRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SumRequest defaultInstance;
+ public static SumRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SumRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SumRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ family_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ qualifier_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SumRequest> PARSER =
+ new com.google.protobuf.AbstractParser<SumRequest>() {
+ public SumRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SumRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SumRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required bytes family = 1;
+ public static final int FAMILY_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString family_;
+ /**
+ * required bytes family = 1;
+ */
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+
+ // optional bytes qualifier = 2;
+ public static final int QUALIFIER_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString qualifier_;
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+
+ private void initFields() {
+ family_ = com.google.protobuf.ByteString.EMPTY;
+ qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasFamily()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, family_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, qualifier_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, family_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, qualifier_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest) obj;
+
+ boolean result = true;
+ result = result && (hasFamily() == other.hasFamily());
+ if (hasFamily()) {
+ result = result && getFamily()
+ .equals(other.getFamily());
+ }
+ result = result && (hasQualifier() == other.hasQualifier());
+ if (hasQualifier()) {
+ result = result && getQualifier()
+ .equals(other.getQualifier());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasFamily()) {
+ hash = (37 * hash) + FAMILY_FIELD_NUMBER;
+ hash = (53 * hash) + getFamily().hashCode();
+ }
+ if (hasQualifier()) {
+ hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
+ hash = (53 * hash) + getQualifier().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code SumRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequestOrBuilder {
+
+ private int bitField0_;
+
+ // required bytes family = 1;
+ private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * required bytes family = 1;
+ */
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public Builder setFamily(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ family_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public Builder clearFamily() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ family_ = getDefaultInstance().getFamily();
+ onChanged();
+ return this;
+ }
+
+ // optional bytes qualifier = 2;
+ private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public Builder setQualifier(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ qualifier_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public Builder clearQualifier() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ qualifier_ = getDefaultInstance().getQualifier();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:SumRequest)
+ }
+
+ static {
+ defaultInstance = new SumRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:SumRequest)
+ }
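
A minimal sketch of building the SumRequest defined above; illustrative only, the column family and qualifier values are arbitrary and `SumRequestSketch` is a hypothetical helper class.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest;

public class SumRequestSketch {
  public static SumRequest buildExampleRequest() {
    return SumRequest.newBuilder()
        .setFamily(ByteString.copyFromUtf8("f"))     // required bytes family = 1
        .setQualifier(ByteString.copyFromUtf8("q"))  // optional bytes qualifier = 2
        .build();
  }
}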
+
+ public interface SumResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required int64 sum = 1;
+ /**
+ * required int64 sum = 1;
+ */
+ boolean hasSum();
+ /**
+ * required int64 sum = 1;
+ */
+ long getSum();
+ }
+ /**
+ * Protobuf type {@code SumResponse}
+ */
+ public static final class SumResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements SumResponseOrBuilder {
+ // Use SumResponse.newBuilder() to construct.
+ private SumResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SumResponse defaultInstance;
+ public static SumResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SumResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SumResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ sum_ = input.readInt64();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SumResponse> PARSER =
+ new com.google.protobuf.AbstractParser<SumResponse>() {
+ public SumResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SumResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SumResponse> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required int64 sum = 1;
+ public static final int SUM_FIELD_NUMBER = 1;
+ private long sum_;
+ /**
+ * required int64 sum = 1;
+ */
+ public boolean hasSum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required int64 sum = 1;
+ */
+ public long getSum() {
+ return sum_;
+ }
+
+ private void initFields() {
+ sum_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasSum()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeInt64(1, sum_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt64Size(1, sum_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse) obj;
+
+ boolean result = true;
+ result = result && (hasSum() == other.hasSum());
+ if (hasSum()) {
+ result = result && (getSum()
+ == other.getSum());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasSum()) {
+ hash = (37 * hash) + SUM_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getSum());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code SumResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponseOrBuilder {
+
+ private int bitField0_;
+
+ // required int64 sum = 1;
+ private long sum_ ;
+ /**
+ * required int64 sum = 1;
+ */
+ public boolean hasSum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required int64 sum = 1;
+ */
+ public long getSum() {
+ return sum_;
+ }
+ /**
+ * required int64 sum = 1;
+ */
+ public Builder setSum(long value) {
+ bitField0_ |= 0x00000001;
+ sum_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required int64 sum = 1;
+ */
+ public Builder clearSum() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ sum_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:SumResponse)
+ }
+
+ static {
+ defaultInstance = new SumResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:SumResponse)
+ }
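
A minimal round-trip sketch for SumResponse, illustrative only: the server side of a coprocessor endpoint builds and serializes the message, and the caller parses it back and reads the required sum field. `SumResponseSketch` is a hypothetical helper class.

import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse;

public class SumResponseSketch {
  public static long roundTrip(long sum)
      throws com.google.protobuf.InvalidProtocolBufferException {
    // Server side: build and serialize the response.
    byte[] wire = SumResponse.newBuilder().setSum(sum).build().toByteArray();
    // Client side: parse it back and read the required int64 sum = 1 field.
    return SumResponse.parseFrom(wire).getSum();
  }
}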
+
+ /**
+ * Protobuf service {@code ColumnAggregationService}
+ */
+ public static abstract class ColumnAggregationService
+ implements com.google.protobuf.Service {
+ protected ColumnAggregationService() {}
+
+ public interface Interface {
+ /**
+ * rpc sum(.SumRequest) returns (.SumResponse);
+ */
+ public abstract void sum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse> done);
+
+ }
+
+ /**
+ * rpc sum(.SumRequest) returns (.SumResponse);
+ */
+ public abstract void sum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse> done);
+
+ // @@protoc_insertion_point(class_scope:ColumnAggregationService)
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+ }
+
+ public final class ColumnAggregationWithErrorsProtos {
+ private ColumnAggregationWithErrorsProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface ColumnAggregationWithErrorsSumRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes family = 1;
+ /**
+ * required bytes family = 1;
+ */
+ boolean hasFamily();
+ /**
+ * required bytes family = 1;
+ */
+ com.google.protobuf.ByteString getFamily();
+
+ // optional bytes qualifier = 2;
+ /**
+ * optional bytes qualifier = 2;
+ */
+ boolean hasQualifier();
+ /**
+ * optional bytes qualifier = 2;
+ */
+ com.google.protobuf.ByteString getQualifier();
+ }
+ /**
+ * Protobuf type {@code ColumnAggregationWithErrorsSumRequest}
+ *
+ *
+ * use unique names for messages in ColumnAggregationXXX.protos due to a bug in
+ * protoc or hadoop's protoc compiler.
+ *
+ */
+ public static final class ColumnAggregationWithErrorsSumRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements ColumnAggregationWithErrorsSumRequestOrBuilder {
+ // Use ColumnAggregationWithErrorsSumRequest.newBuilder() to construct.
+ private ColumnAggregationWithErrorsSumRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ColumnAggregationWithErrorsSumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ColumnAggregationWithErrorsSumRequest defaultInstance;
+ public static ColumnAggregationWithErrorsSumRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ColumnAggregationWithErrorsSumRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ColumnAggregationWithErrorsSumRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ family_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ qualifier_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ColumnAggregationWithErrorsSumRequest> PARSER =
+ // ... (anonymous parser implementation and field declarations elided in this excerpt) ...
+
+ // required bytes family = 1;
+ public static final int FAMILY_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString family_;
+ /**
+ * required bytes family = 1;
+ */
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+
+ // optional bytes qualifier = 2;
+ public static final int QUALIFIER_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString qualifier_;
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+
+ private void initFields() {
+ family_ = com.google.protobuf.ByteString.EMPTY;
+ qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasFamily()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, family_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, qualifier_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, family_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, qualifier_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest) obj;
+
+ boolean result = true;
+ result = result && (hasFamily() == other.hasFamily());
+ if (hasFamily()) {
+ result = result && getFamily()
+ .equals(other.getFamily());
+ }
+ result = result && (hasQualifier() == other.hasQualifier());
+ if (hasQualifier()) {
+ result = result && getQualifier()
+ .equals(other.getQualifier());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasFamily()) {
+ hash = (37 * hash) + FAMILY_FIELD_NUMBER;
+ hash = (53 * hash) + getFamily().hashCode();
+ }
+ if (hasQualifier()) {
+ hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
+ hash = (53 * hash) + getQualifier().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code ColumnAggregationWithErrorsSumRequest}
+ *
+ *
+ * use unique names for messages in ColumnAggregationXXX.protos due to a bug in
+ * protoc or hadoop's protoc compiler.
+ *
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequestOrBuilder {
+ // ... (generated builder boilerplate elided in this excerpt) ...
+
+ // required bytes family = 1;
+ private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * required bytes family = 1;
+ */
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public Builder setFamily(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ family_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public Builder clearFamily() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ family_ = getDefaultInstance().getFamily();
+ onChanged();
+ return this;
+ }
+
+ // optional bytes qualifier = 2;
+ private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public Builder setQualifier(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ qualifier_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public Builder clearQualifier() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ qualifier_ = getDefaultInstance().getQualifier();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:ColumnAggregationWithErrorsSumRequest)
+ }
+
+ static {
+ defaultInstance = new ColumnAggregationWithErrorsSumRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ColumnAggregationWithErrorsSumRequest)
+ }
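A short usage sketch (illustrative only, assuming the standard generated newBuilder()/build()/toByteArray() methods) of the ColumnAggregationWithErrorsSumRequest accessors above: family is required, so build() and isInitialized() insist on it, while qualifier is optional.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest;

public class BuildErrorsSumRequest {
  public static void main(String[] args) throws Exception {
    ColumnAggregationWithErrorsSumRequest req =
        ColumnAggregationWithErrorsSumRequest.newBuilder()
            .setFamily(ByteString.copyFromUtf8("cf"))     // required bytes family = 1
            .setQualifier(ByteString.copyFromUtf8("col")) // optional bytes qualifier = 2
            .build();
    System.out.println(req.hasQualifier()); // true

    // Round-trip through the generated parser shown above.
    ColumnAggregationWithErrorsSumRequest copy =
        ColumnAggregationWithErrorsSumRequest.parseFrom(req.toByteArray());
    System.out.println(copy.getFamily().toStringUtf8()); // prints cf
  }
}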
+
+ public interface ColumnAggregationWithErrorsSumResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required int64 sum = 1;
+ /**
+ * required int64 sum = 1;
+ */
+ boolean hasSum();
+ /**
+ * required int64 sum = 1;
+ */
+ long getSum();
+ }
+ /**
+ * Protobuf type {@code ColumnAggregationWithErrorsSumResponse}
+ */
+ public static final class ColumnAggregationWithErrorsSumResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements ColumnAggregationWithErrorsSumResponseOrBuilder {
+ // Use ColumnAggregationWithErrorsSumResponse.newBuilder() to construct.
+ private ColumnAggregationWithErrorsSumResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ColumnAggregationWithErrorsSumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ColumnAggregationWithErrorsSumResponse defaultInstance;
+ public static ColumnAggregationWithErrorsSumResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ColumnAggregationWithErrorsSumResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ColumnAggregationWithErrorsSumResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ sum_ = input.readInt64();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ColumnAggregationWithErrorsSumResponse> PARSER =
+ // ... (anonymous parser implementation and field declarations elided in this excerpt) ...
+
+ // required int64 sum = 1;
+ public static final int SUM_FIELD_NUMBER = 1;
+ private long sum_;
+ /**
+ * required int64 sum = 1;
+ */
+ public boolean hasSum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required int64 sum = 1;
+ */
+ public long getSum() {
+ return sum_;
+ }
+
+ private void initFields() {
+ sum_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasSum()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeInt64(1, sum_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt64Size(1, sum_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse) obj;
+
+ boolean result = true;
+ result = result && (hasSum() == other.hasSum());
+ if (hasSum()) {
+ result = result && (getSum()
+ == other.getSum());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasSum()) {
+ hash = (37 * hash) + SUM_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getSum());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code ColumnAggregationWithErrorsSumResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponseOrBuilder {
+ // ... (generated builder boilerplate elided in this excerpt) ...
+
+ // required int64 sum = 1;
+ private long sum_;
+ /**
+ * required int64 sum = 1;
+ */
+ public boolean hasSum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required int64 sum = 1;
+ */
+ public long getSum() {
+ return sum_;
+ }
+ /**
+ * required int64 sum = 1;
+ */
+ public Builder setSum(long value) {
+ bitField0_ |= 0x00000001;
+ sum_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required int64 sum = 1;
+ */
+ public Builder clearSum() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ sum_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:ColumnAggregationWithErrorsSumResponse)
+ }
+
+ static {
+ defaultInstance = new ColumnAggregationWithErrorsSumResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ColumnAggregationWithErrorsSumResponse)
+ }
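A sketch (illustrative only) of the parseDelimitedFrom overloads above; it assumes writeDelimitedTo(OutputStream), the protobuf 2.x counterpart that length-prefixes each message so several responses can share one stream, and that parseDelimitedFrom returns null at end of stream.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse;

public class DelimitedSumResponses {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    ColumnAggregationWithErrorsSumResponse.newBuilder().setSum(1L).build().writeDelimitedTo(out);
    ColumnAggregationWithErrorsSumResponse.newBuilder().setSum(2L).build().writeDelimitedTo(out);

    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    long total = 0;
    ColumnAggregationWithErrorsSumResponse msg;
    while ((msg = ColumnAggregationWithErrorsSumResponse.parseDelimitedFrom(in)) != null) {
      total += msg.getSum(); // required field, always present on a successfully parsed message
    }
    System.out.println(total); // prints 3
  }
}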
+
+ /**
+ * Protobuf service {@code ColumnAggregationServiceWithErrors}
+ */
+ public static abstract class ColumnAggregationServiceWithErrors
+ implements com.google.protobuf.Service {
+ protected ColumnAggregationServiceWithErrors() {}
+
+ public interface Interface {
+ /**
+ * rpc sum(.ColumnAggregationWithErrorsSumRequest) returns (.ColumnAggregationWithErrorsSumResponse);
+ */
+ public abstract void sum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse> done);
+ }
+
+ // ... (newReflectiveService/newReflectiveBlockingService plumbing elided in this excerpt) ...
+
+ /**
+ * rpc sum(.ColumnAggregationWithErrorsSumRequest) returns (.ColumnAggregationWithErrorsSumResponse);
+ */
+ public abstract void sum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse> done);
+
+ // ... (rest of ColumnAggregationServiceWithErrors and the ColumnAggregationWithNullResponseProtos file header elided in this excerpt) ...
+
+ public interface ColumnAggregationNullResponseSumRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes family = 1;
+ /**
+ * required bytes family = 1;
+ */
+ boolean hasFamily();
+ /**
+ * required bytes family = 1;
+ */
+ com.google.protobuf.ByteString getFamily();
+
+ // optional bytes qualifier = 2;
+ /**
+ * optional bytes qualifier = 2;
+ */
+ boolean hasQualifier();
+ /**
+ * optional bytes qualifier = 2;
+ */
+ com.google.protobuf.ByteString getQualifier();
+ }
+ /**
+ * Protobuf type {@code ColumnAggregationNullResponseSumRequest}
+ *
+ *
+ * use unique names for messages in ColumnAggregationXXX.protos due to a bug in
+ * protoc or hadoop's protoc compiler.
+ *
+ */
+ public static final class ColumnAggregationNullResponseSumRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements ColumnAggregationNullResponseSumRequestOrBuilder {
+ // Use ColumnAggregationNullResponseSumRequest.newBuilder() to construct.
+ private ColumnAggregationNullResponseSumRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ColumnAggregationNullResponseSumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ColumnAggregationNullResponseSumRequest defaultInstance;
+ public static ColumnAggregationNullResponseSumRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ColumnAggregationNullResponseSumRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ColumnAggregationNullResponseSumRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ family_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ qualifier_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ColumnAggregationNullResponseSumRequest> PARSER =
+ // ... (anonymous parser implementation and field declarations elided in this excerpt) ...
+
+ // required bytes family = 1;
+ public static final int FAMILY_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString family_;
+ /**
+ * required bytes family = 1;
+ */
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+
+ // optional bytes qualifier = 2;
+ public static final int QUALIFIER_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString qualifier_;
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+
+ private void initFields() {
+ family_ = com.google.protobuf.ByteString.EMPTY;
+ qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasFamily()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, family_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, qualifier_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, family_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, qualifier_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest) obj;
+
+ boolean result = true;
+ result = result && (hasFamily() == other.hasFamily());
+ if (hasFamily()) {
+ result = result && getFamily()
+ .equals(other.getFamily());
+ }
+ result = result && (hasQualifier() == other.hasQualifier());
+ if (hasQualifier()) {
+ result = result && getQualifier()
+ .equals(other.getQualifier());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasFamily()) {
+ hash = (37 * hash) + FAMILY_FIELD_NUMBER;
+ hash = (53 * hash) + getFamily().hashCode();
+ }
+ if (hasQualifier()) {
+ hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
+ hash = (53 * hash) + getQualifier().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code ColumnAggregationNullResponseSumRequest}
+ *
+ *
+ * use unique names for messages in ColumnAggregationXXX.protos due to a bug in
+ * protoc or hadoop's protoc compiler.
+ *
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequestOrBuilder {
+ // ... (generated builder boilerplate elided in this excerpt) ...
+
+ // required bytes family = 1;
+ private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * required bytes family = 1;
+ */
+ public boolean hasFamily() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public com.google.protobuf.ByteString getFamily() {
+ return family_;
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public Builder setFamily(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ family_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bytes family = 1;
+ */
+ public Builder clearFamily() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ family_ = getDefaultInstance().getFamily();
+ onChanged();
+ return this;
+ }
+
+ // optional bytes qualifier = 2;
+ private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public boolean hasQualifier() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public com.google.protobuf.ByteString getQualifier() {
+ return qualifier_;
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public Builder setQualifier(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ qualifier_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bytes qualifier = 2;
+ */
+ public Builder clearQualifier() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ qualifier_ = getDefaultInstance().getQualifier();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:ColumnAggregationNullResponseSumRequest)
+ }
+
+ static {
+ defaultInstance = new ColumnAggregationNullResponseSumRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ColumnAggregationNullResponseSumRequest)
+ }
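A brief sketch (illustrative only) of the two-argument parse overloads above. These messages declare no extensions, so an empty ExtensionRegistryLite behaves exactly like the single-argument parseFrom; the registry only matters for message types that define extensions.

import com.google.protobuf.ByteString;
import com.google.protobuf.ExtensionRegistryLite;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest;

public class ParseWithRegistry {
  public static void main(String[] args) throws Exception {
    byte[] wire = ColumnAggregationNullResponseSumRequest.newBuilder()
        .setFamily(ByteString.copyFromUtf8("f"))
        .build()
        .toByteArray();
    // An empty registry: no extensions are looked up while parsing.
    ExtensionRegistryLite registry = ExtensionRegistryLite.getEmptyRegistry();
    ColumnAggregationNullResponseSumRequest req =
        ColumnAggregationNullResponseSumRequest.parseFrom(wire, registry);
    System.out.println(req.getFamily().toStringUtf8()); // prints f
  }
}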
+
+ public interface ColumnAggregationNullResponseSumResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional int64 sum = 1;
+ /**
+ * optional int64 sum = 1;
+ */
+ boolean hasSum();
+ /**
+ * optional int64 sum = 1;
+ */
+ long getSum();
+ }
+ /**
+ * Protobuf type {@code ColumnAggregationNullResponseSumResponse}
+ */
+ public static final class ColumnAggregationNullResponseSumResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements ColumnAggregationNullResponseSumResponseOrBuilder {
+ // Use ColumnAggregationNullResponseSumResponse.newBuilder() to construct.
+ private ColumnAggregationNullResponseSumResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ColumnAggregationNullResponseSumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ColumnAggregationNullResponseSumResponse defaultInstance;
+ public static ColumnAggregationNullResponseSumResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ColumnAggregationNullResponseSumResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ColumnAggregationNullResponseSumResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ sum_ = input.readInt64();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.internal_static_ColumnAggregationNullResponseSumResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ColumnAggregationNullResponseSumResponse> PARSER =
+ // ... (anonymous parser implementation and field declarations elided in this excerpt) ...
+
+ // optional int64 sum = 1;
+ public static final int SUM_FIELD_NUMBER = 1;
+ private long sum_;
+ /**
+ * optional int64 sum = 1;
+ */
+ public boolean hasSum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional int64 sum = 1;
+ */
+ public long getSum() {
+ return sum_;
+ }
+
+ private void initFields() {
+ sum_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeInt64(1, sum_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt64Size(1, sum_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse) obj;
+
+ boolean result = true;
+ result = result && (hasSum() == other.hasSum());
+ if (hasSum()) {
+ result = result && (getSum()
+ == other.getSum());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasSum()) {
+ hash = (37 * hash) + SUM_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getSum());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code ColumnAggregationNullResponseSumResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponseOrBuilder {
+ // ... (generated builder boilerplate elided in this excerpt) ...
+
+ // optional int64 sum = 1;
+ private long sum_;
+ /**
+ * optional int64 sum = 1;
+ */
+ public boolean hasSum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional int64 sum = 1;
+ */
+ public long getSum() {
+ return sum_;
+ }
+ /**
+ * optional int64 sum = 1;
+ */
+ public Builder setSum(long value) {
+ bitField0_ |= 0x00000001;
+ sum_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional int64 sum = 1;
+ */
+ public Builder clearSum() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ sum_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:ColumnAggregationNullResponseSumResponse)
+ }
+
+ static {
+ defaultInstance = new ColumnAggregationNullResponseSumResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ColumnAggregationNullResponseSumResponse)
+ }
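A small sketch (illustrative only) of the optional-field semantics above: unlike the required variants, a ColumnAggregationNullResponseSumResponse with no sum still passes isInitialized(), hasSum() reports presence via bit 0x00000001, and getSum() falls back to the 0L default set in initFields().

import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse;

public class OptionalSumDefault {
  public static void main(String[] args) {
    ColumnAggregationNullResponseSumResponse empty =
        ColumnAggregationNullResponseSumResponse.getDefaultInstance();
    System.out.println(empty.isInitialized()); // true  (no required fields)
    System.out.println(empty.hasSum());        // false (presence bit not set)
    System.out.println(empty.getSum());        // 0
  }
}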
+
+ /**
+ * Protobuf service {@code ColumnAggregationServiceNullResponse}
+ */
+ public static abstract class ColumnAggregationServiceNullResponse
+ implements com.google.protobuf.Service {
+ protected ColumnAggregationServiceNullResponse() {}
+
+ public interface Interface {
+ /**
+ * rpc sum(.ColumnAggregationNullResponseSumRequest) returns (.ColumnAggregationNullResponseSumResponse);
+ */
+ public abstract void sum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse> done);
+ }
+
+ // ... (newReflectiveService/newReflectiveBlockingService plumbing elided in this excerpt) ...
+
+ /**
+ * rpc sum(.ColumnAggregationNullResponseSumRequest) returns (.ColumnAggregationNullResponseSumResponse);
+ */
+ public abstract void sum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithNullResponseProtos.ColumnAggregationNullResponseSumResponse> done);
+
+ // ... (rest of ColumnAggregationServiceNullResponse and the DummyRegionServerEndpointProtos file header elided in this excerpt) ...
+
+ public interface DummyResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string value = 1;
+ /**
+ * required string value = 1;
+ */
+ boolean hasValue();
+ /**
+ * required string value = 1;
+ */
+ java.lang.String getValue();
+ /**
+ * required string value = 1;
+ */
+ com.google.protobuf.ByteString
+ getValueBytes();
+ }
+ /**
+ * Protobuf type {@code hbase.test.pb.DummyResponse}
+ */
+ public static final class DummyResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements DummyResponseOrBuilder {
+ // Use DummyResponse.newBuilder() to construct.
+ private DummyResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private DummyResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final DummyResponse defaultInstance;
+ public static DummyResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public DummyResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private DummyResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ value_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.internal_static_hbase_test_pb_DummyResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<DummyResponse> PARSER =
+ // ... (anonymous parser implementation and field declarations elided in this excerpt) ...
+
+ // required string value = 1;
+ public static final int VALUE_FIELD_NUMBER = 1;
+ private java.lang.Object value_;
+ /**
+ * required string value = 1;
+ */
+ public boolean hasValue() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string value = 1;
+ */
+ public java.lang.String getValue() {
+ java.lang.Object ref = value_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ value_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string value = 1;
+ */
+ public com.google.protobuf.ByteString
+ getValueBytes() {
+ java.lang.Object ref = value_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ value_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ value_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasValue()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getValueBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getValueBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse) obj;
+
+ boolean result = true;
+ result = result && (hasValue() == other.hasValue());
+ if (hasValue()) {
+ result = result && getValue()
+ .equals(other.getValue());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasValue()) {
+ hash = (37 * hash) + VALUE_FIELD_NUMBER;
+ hash = (53 * hash) + getValue().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.test.pb.DummyResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponseOrBuilder {
+ private int bitField0_;
+
+ // required string value = 1;
+ private java.lang.Object value_ = "";
+ /**
+ * required string value = 1;
+ */
+ public boolean hasValue() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string value = 1;
+ */
+ public java.lang.String getValue() {
+ java.lang.Object ref = value_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ value_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string value = 1;
+ */
+ public com.google.protobuf.ByteString
+ getValueBytes() {
+ java.lang.Object ref = value_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ value_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string value = 1;
+ */
+ public Builder setValue(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ value_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string value = 1;
+ */
+ public Builder clearValue() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ value_ = getDefaultInstance().getValue();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string value = 1;
+ */
+ public Builder setValueBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ value_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.test.pb.DummyResponse)
+ }
+
+ static {
+ defaultInstance = new DummyResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.test.pb.DummyResponse)
+ }
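+ // Illustrative usage sketch (not part of the generated file): a minimal
+ // round trip through the DummyResponse API generated above, using only the
+ // builder and parser methods shown in this class. The literal "pong" is an
+ // arbitrary example value.
+ //
+ //   DummyResponse original = DummyResponse.newBuilder()
+ //       .setValue("pong")                 // required string value = 1
+ //       .build();
+ //   com.google.protobuf.ByteString bytes = original.toByteString();
+ //   DummyResponse parsed = DummyResponse.parseFrom(bytes);
+ //   assert parsed.getValue().equals("pong");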
+
+ /**
+ * Protobuf service {@code hbase.test.pb.DummyService}
+ */
+ public static abstract class DummyService
+ implements com.google.protobuf.Service {
+ protected DummyService() {}
+
+ public interface Interface {
+ /**
+ * rpc dummyCall(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);
+ */
+ public abstract void dummyCall(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse> done);
+
+ /**
+ * rpc dummyThrow(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);
+ */
+ public abstract void dummyThrow(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse> done);
+ }
+
+ /**
+ * rpc dummyCall(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);
+ */
+ public abstract void dummyCall(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse> done);
+
+ /**
+ * rpc dummyThrow(.hbase.test.pb.DummyRequest) returns (.hbase.test.pb.DummyResponse);
+ */
+ public abstract void dummyThrow(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos.DummyResponse> done);
+ }
+
+ public interface IncCounterProcessorRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes row = 1;
+ /**
+ * required bytes row = 1;
+ */
+ boolean hasRow();
+ /**
+ * required bytes row = 1;
+ */
+ com.google.protobuf.ByteString getRow();
+
+ // required int32 counter = 2;
+ /**
+ * required int32 counter = 2;
+ */
+ boolean hasCounter();
+ /**
+ * required int32 counter = 2;
+ */
+ int getCounter();
+ }
+ /**
+ * Protobuf type {@code IncCounterProcessorRequest}
+ */
+ public static final class IncCounterProcessorRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements IncCounterProcessorRequestOrBuilder {
+ // Use IncCounterProcessorRequest.newBuilder() to construct.
+ private IncCounterProcessorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private IncCounterProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final IncCounterProcessorRequest defaultInstance;
+ public static IncCounterProcessorRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public IncCounterProcessorRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private IncCounterProcessorRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ row_ = input.readBytes();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ counter_ = input.readInt32();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<IncCounterProcessorRequest> PARSER =
+ new com.google.protobuf.AbstractParser<IncCounterProcessorRequest>() {
+ public IncCounterProcessorRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new IncCounterProcessorRequest(input, extensionRegistry);
+ }
+ };
+
+ private int bitField0_;
+ // required bytes row = 1;
+ public static final int ROW_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString row_;
+ /**
+ * required bytes row = 1;
+ */
+ public boolean hasRow() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes row = 1;
+ */
+ public com.google.protobuf.ByteString getRow() {
+ return row_;
+ }
+
+ // required int32 counter = 2;
+ public static final int COUNTER_FIELD_NUMBER = 2;
+ private int counter_;
+ /**
+ * required int32 counter = 2;
+ */
+ public boolean hasCounter() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required int32 counter = 2;
+ */
+ public int getCounter() {
+ return counter_;
+ }
+
+ private void initFields() {
+ row_ = com.google.protobuf.ByteString.EMPTY;
+ counter_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRow()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasCounter()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, row_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeInt32(2, counter_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, row_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(2, counter_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest) obj;
+
+ boolean result = true;
+ result = result && (hasRow() == other.hasRow());
+ if (hasRow()) {
+ result = result && getRow()
+ .equals(other.getRow());
+ }
+ result = result && (hasCounter() == other.hasCounter());
+ if (hasCounter()) {
+ result = result && (getCounter()
+ == other.getCounter());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRow()) {
+ hash = (37 * hash) + ROW_FIELD_NUMBER;
+ hash = (53 * hash) + getRow().hashCode();
+ }
+ if (hasCounter()) {
+ hash = (37 * hash) + COUNTER_FIELD_NUMBER;
+ hash = (53 * hash) + getCounter();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code IncCounterProcessorRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorRequestOrBuilder {
+ private int bitField0_;
+
+ // required bytes row = 1;
+ private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * required bytes row = 1;
+ */
+ public boolean hasRow() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes row = 1;
+ */
+ public com.google.protobuf.ByteString getRow() {
+ return row_;
+ }
+ /**
+ * required bytes row = 1;
+ */
+ public Builder setRow(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ row_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bytes row = 1;
+ */
+ public Builder clearRow() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ row_ = getDefaultInstance().getRow();
+ onChanged();
+ return this;
+ }
+
+ // required int32 counter = 2;
+ private int counter_ ;
+ /**
+ * required int32 counter = 2;
+ */
+ public boolean hasCounter() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required int32 counter = 2;
+ */
+ public int getCounter() {
+ return counter_;
+ }
+ /**
+ * required int32 counter = 2;
+ */
+ public Builder setCounter(int value) {
+ bitField0_ |= 0x00000002;
+ counter_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required int32 counter = 2;
+ */
+ public Builder clearCounter() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ counter_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:IncCounterProcessorRequest)
+ }
+
+ static {
+ defaultInstance = new IncCounterProcessorRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:IncCounterProcessorRequest)
+ }
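+ // Illustrative usage sketch (not part of the generated file): building an
+ // IncCounterProcessorRequest with both required fields set. The row key
+ // "testrow" and the delta of 1 are arbitrary example values.
+ //
+ //   IncCounterProcessorRequest request = IncCounterProcessorRequest.newBuilder()
+ //       .setRow(com.google.protobuf.ByteString.copyFromUtf8("testrow"))  // required bytes row = 1
+ //       .setCounter(1)                                                   // required int32 counter = 2
+ //       .build();
+ //   assert request.isInitialized();  // both required fields are present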
+
+ public interface IncCounterProcessorResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required int32 response = 1;
+ /**
+ * required int32 response = 1;
+ */
+ boolean hasResponse();
+ /**
+ * required int32 response = 1;
+ */
+ int getResponse();
+ }
+ /**
+ * Protobuf type {@code IncCounterProcessorResponse}
+ */
+ public static final class IncCounterProcessorResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements IncCounterProcessorResponseOrBuilder {
+ // Use IncCounterProcessorResponse.newBuilder() to construct.
+ private IncCounterProcessorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private IncCounterProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final IncCounterProcessorResponse defaultInstance;
+ public static IncCounterProcessorResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public IncCounterProcessorResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private IncCounterProcessorResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ response_ = input.readInt32();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_IncCounterProcessorResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<IncCounterProcessorResponse> PARSER =
+ new com.google.protobuf.AbstractParser<IncCounterProcessorResponse>() {
+ public IncCounterProcessorResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new IncCounterProcessorResponse(input, extensionRegistry);
+ }
+ };
+
+ private int bitField0_;
+ // required int32 response = 1;
+ public static final int RESPONSE_FIELD_NUMBER = 1;
+ private int response_;
+ /**
+ * required int32 response = 1;
+ */
+ public boolean hasResponse() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required int32 response = 1;
+ */
+ public int getResponse() {
+ return response_;
+ }
+
+ private void initFields() {
+ response_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasResponse()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeInt32(1, response_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt32Size(1, response_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse) obj;
+
+ boolean result = true;
+ result = result && (hasResponse() == other.hasResponse());
+ if (hasResponse()) {
+ result = result && (getResponse()
+ == other.getResponse());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasResponse()) {
+ hash = (37 * hash) + RESPONSE_FIELD_NUMBER;
+ hash = (53 * hash) + getResponse();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code IncCounterProcessorResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.IncCounterProcessorResponseOrBuilder {
+ private int bitField0_;
+
+ // required int32 response = 1;
+ private int response_ ;
+ /**
+ * required int32 response = 1;
+ */
+ public boolean hasResponse() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required int32 response = 1;
+ */
+ public int getResponse() {
+ return response_;
+ }
+ /**
+ * required int32 response = 1;
+ */
+ public Builder setResponse(int value) {
+ bitField0_ |= 0x00000001;
+ response_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required int32 response = 1;
+ */
+ public Builder clearResponse() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ response_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:IncCounterProcessorResponse)
+ }
+
+ static {
+ defaultInstance = new IncCounterProcessorResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:IncCounterProcessorResponse)
+ }
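+ // Illustrative usage sketch (not part of the generated file): length-delimited
+ // streaming of IncCounterProcessorResponse messages. parseDelimitedFrom is
+ // generated above; writeDelimitedTo is inherited from the protobuf runtime's
+ // MessageLite base class. The stream plumbing and the value 42 are arbitrary.
+ //
+ //   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
+ //   IncCounterProcessorResponse.newBuilder().setResponse(42).build()
+ //       .writeDelimitedTo(out);           // prefixes the message with its size
+ //   java.io.ByteArrayInputStream in =
+ //       new java.io.ByteArrayInputStream(out.toByteArray());
+ //   IncCounterProcessorResponse response =
+ //       IncCounterProcessorResponse.parseDelimitedFrom(in);
+ //   assert response.getResponse() == 42;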
+
+ public interface FriendsOfFriendsProcessorRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes person = 1;
+ /**
+ * required bytes person = 1;
+ */
+ boolean hasPerson();
+ /**
+ * required bytes person = 1;
+ */
+ com.google.protobuf.ByteString getPerson();
+
+ // required bytes row = 2;
+ /**
+ * required bytes row = 2;
+ */
+ boolean hasRow();
+ /**
+ * required bytes row = 2;
+ */
+ com.google.protobuf.ByteString getRow();
+
+ // repeated string result = 3;
+ /**
+ * repeated string result = 3;
+ */
+ java.util.List<java.lang.String>
+ getResultList();
+ /**
+ * repeated string result = 3;
+ */
+ int getResultCount();
+ /**
+ * repeated string result = 3;
+ */
+ java.lang.String getResult(int index);
+ /**
+ * repeated string result = 3;
+ */
+ com.google.protobuf.ByteString
+ getResultBytes(int index);
+ }
+ /**
+ * Protobuf type {@code FriendsOfFriendsProcessorRequest}
+ */
+ public static final class FriendsOfFriendsProcessorRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements FriendsOfFriendsProcessorRequestOrBuilder {
+ // Use FriendsOfFriendsProcessorRequest.newBuilder() to construct.
+ private FriendsOfFriendsProcessorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private FriendsOfFriendsProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final FriendsOfFriendsProcessorRequest defaultInstance;
+ public static FriendsOfFriendsProcessorRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public FriendsOfFriendsProcessorRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private FriendsOfFriendsProcessorRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ person_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ row_ = input.readBytes();
+ break;
+ }
+ case 26: {
+ if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ result_ = new com.google.protobuf.LazyStringArrayList();
+ mutable_bitField0_ |= 0x00000004;
+ }
+ result_.add(input.readBytes());
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
+ result_ = new com.google.protobuf.UnmodifiableLazyStringList(result_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<FriendsOfFriendsProcessorRequest> PARSER =
+ new com.google.protobuf.AbstractParser<FriendsOfFriendsProcessorRequest>() {
+ public FriendsOfFriendsProcessorRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new FriendsOfFriendsProcessorRequest(input, extensionRegistry);
+ }
+ };
+
+ private int bitField0_;
+ // required bytes person = 1;
+ public static final int PERSON_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString person_;
+ /**
+ * required bytes person = 1;
+ */
+ public boolean hasPerson() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes person = 1;
+ */
+ public com.google.protobuf.ByteString getPerson() {
+ return person_;
+ }
+
+ // required bytes row = 2;
+ public static final int ROW_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString row_;
+ /**
+ * required bytes row = 2;
+ */
+ public boolean hasRow() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required bytes row = 2;
+ */
+ public com.google.protobuf.ByteString getRow() {
+ return row_;
+ }
+
+ // repeated string result = 3;
+ public static final int RESULT_FIELD_NUMBER = 3;
+ private com.google.protobuf.LazyStringList result_;
+ /**
+ * repeated string result = 3;
+ */
+ public java.util.List<java.lang.String>
+ getResultList() {
+ return result_;
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public int getResultCount() {
+ return result_.size();
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public java.lang.String getResult(int index) {
+ return result_.get(index);
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public com.google.protobuf.ByteString
+ getResultBytes(int index) {
+ return result_.getByteString(index);
+ }
+
+ private void initFields() {
+ person_ = com.google.protobuf.ByteString.EMPTY;
+ row_ = com.google.protobuf.ByteString.EMPTY;
+ result_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasPerson()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasRow()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, person_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, row_);
+ }
+ for (int i = 0; i < result_.size(); i++) {
+ output.writeBytes(3, result_.getByteString(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, person_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, row_);
+ }
+ {
+ int dataSize = 0;
+ for (int i = 0; i < result_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(result_.getByteString(i));
+ }
+ size += dataSize;
+ size += 1 * getResultList().size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest) obj;
+
+ boolean result = true;
+ result = result && (hasPerson() == other.hasPerson());
+ if (hasPerson()) {
+ result = result && getPerson()
+ .equals(other.getPerson());
+ }
+ result = result && (hasRow() == other.hasRow());
+ if (hasRow()) {
+ result = result && getRow()
+ .equals(other.getRow());
+ }
+ result = result && getResultList()
+ .equals(other.getResultList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasPerson()) {
+ hash = (37 * hash) + PERSON_FIELD_NUMBER;
+ hash = (53 * hash) + getPerson().hashCode();
+ }
+ if (hasRow()) {
+ hash = (37 * hash) + ROW_FIELD_NUMBER;
+ hash = (53 * hash) + getRow().hashCode();
+ }
+ if (getResultCount() > 0) {
+ hash = (37 * hash) + RESULT_FIELD_NUMBER;
+ hash = (53 * hash) + getResultList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code FriendsOfFriendsProcessorRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorRequestOrBuilder {
+ private int bitField0_;
+
+ // required bytes person = 1;
+ private com.google.protobuf.ByteString person_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * required bytes person = 1;
+ */
+ public boolean hasPerson() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes person = 1;
+ */
+ public com.google.protobuf.ByteString getPerson() {
+ return person_;
+ }
+ /**
+ * required bytes person = 1;
+ */
+ public Builder setPerson(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ person_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bytes person = 1;
+ */
+ public Builder clearPerson() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ person_ = getDefaultInstance().getPerson();
+ onChanged();
+ return this;
+ }
+
+ // required bytes row = 2;
+ private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * required bytes row = 2;
+ */
+ public boolean hasRow() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required bytes row = 2;
+ */
+ public com.google.protobuf.ByteString getRow() {
+ return row_;
+ }
+ /**
+ * required bytes row = 2;
+ */
+ public Builder setRow(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ row_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bytes row = 2;
+ */
+ public Builder clearRow() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ row_ = getDefaultInstance().getRow();
+ onChanged();
+ return this;
+ }
+
+ // repeated string result = 3;
+ private com.google.protobuf.LazyStringList result_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ private void ensureResultIsMutable() {
+ if (!((bitField0_ & 0x00000004) == 0x00000004)) {
+ result_ = new com.google.protobuf.LazyStringArrayList(result_);
+ bitField0_ |= 0x00000004;
+ }
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public java.util.List<java.lang.String>
+ getResultList() {
+ return java.util.Collections.unmodifiableList(result_);
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public int getResultCount() {
+ return result_.size();
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public java.lang.String getResult(int index) {
+ return result_.get(index);
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public com.google.protobuf.ByteString
+ getResultBytes(int index) {
+ return result_.getByteString(index);
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public Builder setResult(
+ int index, java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureResultIsMutable();
+ result_.set(index, value);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public Builder addResult(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureResultIsMutable();
+ result_.add(value);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public Builder addAllResult(
+ java.lang.Iterable<java.lang.String> values) {
+ ensureResultIsMutable();
+ super.addAll(values, result_);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public Builder clearResult() {
+ result_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated string result = 3;
+ */
+ public Builder addResultBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureResultIsMutable();
+ result_.add(value);
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:FriendsOfFriendsProcessorRequest)
+ }
+
+ static {
+ defaultInstance = new FriendsOfFriendsProcessorRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:FriendsOfFriendsProcessorRequest)
+ }
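+ // Illustrative usage sketch (not part of the generated file): populating the
+ // repeated string field of FriendsOfFriendsProcessorRequest. The person/row
+ // keys and friend names are arbitrary example values.
+ //
+ //   FriendsOfFriendsProcessorRequest request =
+ //       FriendsOfFriendsProcessorRequest.newBuilder()
+ //           .setPerson(com.google.protobuf.ByteString.copyFromUtf8("alice"))
+ //           .setRow(com.google.protobuf.ByteString.copyFromUtf8("row-alice"))
+ //           .addResult("bob")                                  // append one value
+ //           .addAllResult(java.util.Arrays.asList("carol", "dave"))
+ //           .build();
+ //   assert request.getResultCount() == 3;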
+
+ public interface FriendsOfFriendsProcessorResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated string result = 1;
+ /**
+ * repeated string result = 1;
+ */
+ java.util.List<java.lang.String>
+ getResultList();
+ /**
+ * repeated string result = 1;
+ */
+ int getResultCount();
+ /**
+ * repeated string result = 1;
+ */
+ java.lang.String getResult(int index);
+ /**
+ * repeated string result = 1;
+ */
+ com.google.protobuf.ByteString
+ getResultBytes(int index);
+ }
+ /**
+ * Protobuf type {@code FriendsOfFriendsProcessorResponse}
+ */
+ public static final class FriendsOfFriendsProcessorResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements FriendsOfFriendsProcessorResponseOrBuilder {
+ // Use FriendsOfFriendsProcessorResponse.newBuilder() to construct.
+ private FriendsOfFriendsProcessorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private FriendsOfFriendsProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final FriendsOfFriendsProcessorResponse defaultInstance;
+ public static FriendsOfFriendsProcessorResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public FriendsOfFriendsProcessorResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private FriendsOfFriendsProcessorResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ result_ = new com.google.protobuf.LazyStringArrayList();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ result_.add(input.readBytes());
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ result_ = new com.google.protobuf.UnmodifiableLazyStringList(result_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_FriendsOfFriendsProcessorResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<FriendsOfFriendsProcessorResponse> PARSER =
+ new com.google.protobuf.AbstractParser<FriendsOfFriendsProcessorResponse>() {
+ public FriendsOfFriendsProcessorResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new FriendsOfFriendsProcessorResponse(input, extensionRegistry);
+ }
+ };
+
+ // repeated string result = 1;
+ public static final int RESULT_FIELD_NUMBER = 1;
+ private com.google.protobuf.LazyStringList result_;
+ /**
+ * repeated string result = 1;
+ */
+ public java.util.List<java.lang.String>
+ getResultList() {
+ return result_;
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public int getResultCount() {
+ return result_.size();
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public java.lang.String getResult(int index) {
+ return result_.get(index);
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public com.google.protobuf.ByteString
+ getResultBytes(int index) {
+ return result_.getByteString(index);
+ }
+
+ private void initFields() {
+ result_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < result_.size(); i++) {
+ output.writeBytes(1, result_.getByteString(i));
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < result_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(result_.getByteString(i));
+ }
+ size += dataSize;
+ size += 1 * getResultList().size();
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse) obj;
+
+ boolean result = true;
+ result = result && getResultList()
+ .equals(other.getResultList());
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getResultCount() > 0) {
+ hash = (37 * hash) + RESULT_FIELD_NUMBER;
+ hash = (53 * hash) + getResultList().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code FriendsOfFriendsProcessorResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponseOrBuilder {
+ private int bitField0_;
+
+ // repeated string result = 1;
+ private com.google.protobuf.LazyStringList result_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ private void ensureResultIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ result_ = new com.google.protobuf.LazyStringArrayList(result_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public java.util.List<java.lang.String>
+ getResultList() {
+ return java.util.Collections.unmodifiableList(result_);
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public int getResultCount() {
+ return result_.size();
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public java.lang.String getResult(int index) {
+ return result_.get(index);
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public com.google.protobuf.ByteString
+ getResultBytes(int index) {
+ return result_.getByteString(index);
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public Builder setResult(
+ int index, java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureResultIsMutable();
+ result_.set(index, value);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public Builder addResult(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureResultIsMutable();
+ result_.add(value);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public Builder addAllResult(
+ java.lang.Iterable<java.lang.String> values) {
+ ensureResultIsMutable();
+ super.addAll(values, result_);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public Builder clearResult() {
+ result_ = com.google.protobuf.LazyStringArrayList.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated string result = 1;
+ */
+ public Builder addResultBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureResultIsMutable();
+ result_.add(value);
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:FriendsOfFriendsProcessorResponse)
+ }
+
+ static {
+ defaultInstance = new FriendsOfFriendsProcessorResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:FriendsOfFriendsProcessorResponse)
+ }
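
For orientation, here is a small usage sketch (not part of the generated file or the patch) showing how client code could build a FriendsOfFriendsProcessorResponse and read back its repeated result field through the accessors generated above; the class name FriendsOfFriendsResponseDemo and the literal values are illustrative only.

import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.FriendsOfFriendsProcessorResponse;

public class FriendsOfFriendsResponseDemo {
  public static void main(String[] args) {
    // Build a response through the generated Builder; "alice" and "bob" are made-up values.
    FriendsOfFriendsProcessorResponse response =
        FriendsOfFriendsProcessorResponse.newBuilder()
            .addResult("alice")
            .addResult("bob")
            .build();
    // Read the repeated "result" field back by index.
    for (int i = 0; i < response.getResultCount(); i++) {
      System.out.println(response.getResult(i));
    }
  }
}
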
+
+ public interface RowSwapProcessorRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes row1 = 1;
+ /**
+ * required bytes row1 = 1;
+ */
+ boolean hasRow1();
+ /**
+ * required bytes row1 = 1;
+ */
+ com.google.protobuf.ByteString getRow1();
+
+ // required bytes row2 = 2;
+ /**
+ * required bytes row2 = 2;
+ */
+ boolean hasRow2();
+ /**
+ * required bytes row2 = 2;
+ */
+ com.google.protobuf.ByteString getRow2();
+ }
+ /**
+ * Protobuf type {@code RowSwapProcessorRequest}
+ */
+ public static final class RowSwapProcessorRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements RowSwapProcessorRequestOrBuilder {
+ // Use RowSwapProcessorRequest.newBuilder() to construct.
+ private RowSwapProcessorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private RowSwapProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final RowSwapProcessorRequest defaultInstance;
+ public static RowSwapProcessorRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public RowSwapProcessorRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private RowSwapProcessorRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ row1_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ row2_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<RowSwapProcessorRequest> PARSER =
+ new com.google.protobuf.AbstractParser<RowSwapProcessorRequest>() {
+ public RowSwapProcessorRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new RowSwapProcessorRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<RowSwapProcessorRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required bytes row1 = 1;
+ public static final int ROW1_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString row1_;
+ /**
+ * required bytes row1 = 1;
+ */
+ public boolean hasRow1() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes row1 = 1;
+ */
+ public com.google.protobuf.ByteString getRow1() {
+ return row1_;
+ }
+
+ // required bytes row2 = 2;
+ public static final int ROW2_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString row2_;
+ /**
+ * required bytes row2 = 2;
+ */
+ public boolean hasRow2() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required bytes row2 = 2;
+ */
+ public com.google.protobuf.ByteString getRow2() {
+ return row2_;
+ }
+
+ private void initFields() {
+ row1_ = com.google.protobuf.ByteString.EMPTY;
+ row2_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRow1()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasRow2()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, row1_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, row2_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, row1_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, row2_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest) obj;
+
+ boolean result = true;
+ result = result && (hasRow1() == other.hasRow1());
+ if (hasRow1()) {
+ result = result && getRow1()
+ .equals(other.getRow1());
+ }
+ result = result && (hasRow2() == other.hasRow2());
+ if (hasRow2()) {
+ result = result && getRow2()
+ .equals(other.getRow2());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRow1()) {
+ hash = (37 * hash) + ROW1_FIELD_NUMBER;
+ hash = (53 * hash) + getRow1().hashCode();
+ }
+ if (hasRow2()) {
+ hash = (37 * hash) + ROW2_FIELD_NUMBER;
+ hash = (53 * hash) + getRow2().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code RowSwapProcessorRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequestOrBuilder {
+
+ private int bitField0_;
+
+ // required bytes row1 = 1;
+ private com.google.protobuf.ByteString row1_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * required bytes row1 = 1;
+ */
+ public boolean hasRow1() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes row1 = 1;
+ */
+ public com.google.protobuf.ByteString getRow1() {
+ return row1_;
+ }
+ /**
+ * required bytes row1 = 1;
+ */
+ public Builder setRow1(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ row1_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bytes row1 = 1;
+ */
+ public Builder clearRow1() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ row1_ = getDefaultInstance().getRow1();
+ onChanged();
+ return this;
+ }
+
+ // required bytes row2 = 2;
+ private com.google.protobuf.ByteString row2_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * required bytes row2 = 2;
+ */
+ public boolean hasRow2() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required bytes row2 = 2;
+ */
+ public com.google.protobuf.ByteString getRow2() {
+ return row2_;
+ }
+ /**
+ * required bytes row2 = 2;
+ */
+ public Builder setRow2(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ row2_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bytes row2 = 2;
+ */
+ public Builder clearRow2() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ row2_ = getDefaultInstance().getRow2();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:RowSwapProcessorRequest)
+ }
+
+ static {
+ defaultInstance = new RowSwapProcessorRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:RowSwapProcessorRequest)
+ }
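
A similar sketch for RowSwapProcessorRequest (again, not part of the patch; RowSwapRequestDemo and the row keys are invented for illustration): both row1 and row2 are required, so isInitialized() only returns true once both setters have been called, and build() fails if either is missing.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorRequest;

public class RowSwapRequestDemo {
  public static void main(String[] args) throws Exception {
    // Both fields are required bytes, supplied here as UTF-8 encoded row keys.
    RowSwapProcessorRequest request = RowSwapProcessorRequest.newBuilder()
        .setRow1(ByteString.copyFromUtf8("rowA"))
        .setRow2(ByteString.copyFromUtf8("rowB"))
        .build();
    // Round-trip through the wire format with the generated parser entry point.
    RowSwapProcessorRequest reparsed =
        RowSwapProcessorRequest.parseFrom(request.toByteArray());
    System.out.println(reparsed.getRow1().toStringUtf8() + " <-> "
        + reparsed.getRow2().toStringUtf8());
  }
}
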
+
+ public interface RowSwapProcessorResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code RowSwapProcessorResponse}
+ */
+ public static final class RowSwapProcessorResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements RowSwapProcessorResponseOrBuilder {
+ // Use RowSwapProcessorResponse.newBuilder() to construct.
+ private RowSwapProcessorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private RowSwapProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final RowSwapProcessorResponse defaultInstance;
+ public static RowSwapProcessorResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public RowSwapProcessorResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private RowSwapProcessorResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_RowSwapProcessorResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.RowSwapProcessorResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<RowSwapProcessorResponse> PARSER =
+ new com.google.protobuf.AbstractParser<RowSwapProcessorResponse>() {
+ public RowSwapProcessorResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new RowSwapProcessorResponse(input, extensionRegistry);
+ }
+ };
+
+ public interface TimeoutProcessorRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bytes row = 1;
+ /**
+ * required bytes row = 1;
+ */
+ boolean hasRow();
+ /**
+ * required bytes row = 1;
+ */
+ com.google.protobuf.ByteString getRow();
+ }
+ /**
+ * Protobuf type {@code TimeoutProcessorRequest}
+ */
+ public static final class TimeoutProcessorRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements TimeoutProcessorRequestOrBuilder {
+ // Use TimeoutProcessorRequest.newBuilder() to construct.
+ private TimeoutProcessorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private TimeoutProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final TimeoutProcessorRequest defaultInstance;
+ public static TimeoutProcessorRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public TimeoutProcessorRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private TimeoutProcessorRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ row_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<TimeoutProcessorRequest> PARSER =
+ new com.google.protobuf.AbstractParser<TimeoutProcessorRequest>() {
+ public TimeoutProcessorRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new TimeoutProcessorRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<TimeoutProcessorRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required bytes row = 1;
+ public static final int ROW_FIELD_NUMBER = 1;
+ private com.google.protobuf.ByteString row_;
+ /**
+ * required bytes row = 1;
+ */
+ public boolean hasRow() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes row = 1;
+ */
+ public com.google.protobuf.ByteString getRow() {
+ return row_;
+ }
+
+ private void initFields() {
+ row_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRow()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, row_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, row_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest) obj;
+
+ boolean result = true;
+ result = result && (hasRow() == other.hasRow());
+ if (hasRow()) {
+ result = result && getRow()
+ .equals(other.getRow());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRow()) {
+ hash = (37 * hash) + ROW_FIELD_NUMBER;
+ hash = (53 * hash) + getRow().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code TimeoutProcessorRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorRequestOrBuilder {
+
+ private int bitField0_;
+
+ // required bytes row = 1;
+ private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * required bytes row = 1;
+ */
+ public boolean hasRow() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bytes row = 1;
+ */
+ public com.google.protobuf.ByteString getRow() {
+ return row_;
+ }
+ /**
+ * required bytes row = 1;
+ */
+ public Builder setRow(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ row_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bytes row = 1;
+ */
+ public Builder clearRow() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ row_ = getDefaultInstance().getRow();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:TimeoutProcessorRequest)
+ }
+
+ static {
+ defaultInstance = new TimeoutProcessorRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:TimeoutProcessorRequest)
+ }
+
+ public interface TimeoutProcessorResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ /**
+ * Protobuf type {@code TimeoutProcessorResponse}
+ */
+ public static final class TimeoutProcessorResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements TimeoutProcessorResponseOrBuilder {
+ // Use TimeoutProcessorResponse.newBuilder() to construct.
+ private TimeoutProcessorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private TimeoutProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final TimeoutProcessorResponse defaultInstance;
+ public static TimeoutProcessorResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public TimeoutProcessorResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private TimeoutProcessorResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.internal_static_TimeoutProcessorResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.IncrementCounterProcessorTestProtos.TimeoutProcessorResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<TimeoutProcessorResponse> PARSER =
+ new com.google.protobuf.AbstractParser<TimeoutProcessorResponse>() {
+ public TimeoutProcessorResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new TimeoutProcessorResponse(input, extensionRegistry);
+ }
+ };
+
+ public interface AggregateRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string interpreter_class_name = 1;
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ boolean hasInterpreterClassName();
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ java.lang.String getInterpreterClassName();
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ com.google.protobuf.ByteString
+ getInterpreterClassNameBytes();
+
+ // required .hbase.pb.Scan scan = 2;
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ boolean hasScan();
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
+
+ // optional bytes interpreter_specific_bytes = 3;
+ /**
+ * optional bytes interpreter_specific_bytes = 3;
+ */
+ boolean hasInterpreterSpecificBytes();
+ /**
+ * optional bytes interpreter_specific_bytes = 3;
+ */
+ com.google.protobuf.ByteString getInterpreterSpecificBytes();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.AggregateRequest}
+ */
+ public static final class AggregateRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements AggregateRequestOrBuilder {
+ // Use AggregateRequest.newBuilder() to construct.
+ private AggregateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private AggregateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final AggregateRequest defaultInstance;
+ public static AggregateRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AggregateRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private AggregateRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ interpreterClassName_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ subBuilder = scan_.toBuilder();
+ }
+ scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(scan_);
+ scan_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000002;
+ break;
+ }
+ case 26: {
+ bitField0_ |= 0x00000004;
+ interpreterSpecificBytes_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<AggregateRequest> PARSER =
+ new com.google.protobuf.AbstractParser<AggregateRequest>() {
+ public AggregateRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new AggregateRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<AggregateRequest> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required string interpreter_class_name = 1;
+ public static final int INTERPRETER_CLASS_NAME_FIELD_NUMBER = 1;
+ private java.lang.Object interpreterClassName_;
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ public boolean hasInterpreterClassName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ public java.lang.String getInterpreterClassName() {
+ java.lang.Object ref = interpreterClassName_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ interpreterClassName_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ public com.google.protobuf.ByteString
+ getInterpreterClassNameBytes() {
+ java.lang.Object ref = interpreterClassName_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ interpreterClassName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // required .hbase.pb.Scan scan = 2;
+ public static final int SCAN_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public boolean hasScan() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+ return scan_;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+ return scan_;
+ }
+
+ // optional bytes interpreter_specific_bytes = 3;
+ public static final int INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER = 3;
+ private com.google.protobuf.ByteString interpreterSpecificBytes_;
+ /**
+ * optional bytes interpreter_specific_bytes = 3;
+ */
+ public boolean hasInterpreterSpecificBytes() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * optional bytes interpreter_specific_bytes = 3;
+ */
+ public com.google.protobuf.ByteString getInterpreterSpecificBytes() {
+ return interpreterSpecificBytes_;
+ }
+
+ private void initFields() {
+ interpreterClassName_ = "";
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasInterpreterClassName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasScan()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getScan().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getInterpreterClassNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, scan_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBytes(3, interpreterSpecificBytes_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getInterpreterClassNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, scan_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(3, interpreterSpecificBytes_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest) obj;
+
+ boolean result = true;
+ result = result && (hasInterpreterClassName() == other.hasInterpreterClassName());
+ if (hasInterpreterClassName()) {
+ result = result && getInterpreterClassName()
+ .equals(other.getInterpreterClassName());
+ }
+ result = result && (hasScan() == other.hasScan());
+ if (hasScan()) {
+ result = result && getScan()
+ .equals(other.getScan());
+ }
+ result = result && (hasInterpreterSpecificBytes() == other.hasInterpreterSpecificBytes());
+ if (hasInterpreterSpecificBytes()) {
+ result = result && getInterpreterSpecificBytes()
+ .equals(other.getInterpreterSpecificBytes());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasInterpreterClassName()) {
+ hash = (37 * hash) + INTERPRETER_CLASS_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getInterpreterClassName().hashCode();
+ }
+ if (hasScan()) {
+ hash = (37 * hash) + SCAN_FIELD_NUMBER;
+ hash = (53 * hash) + getScan().hashCode();
+ }
+ if (hasInterpreterSpecificBytes()) {
+ hash = (37 * hash) + INTERPRETER_SPECIFIC_BYTES_FIELD_NUMBER;
+ hash = (53 * hash) + getInterpreterSpecificBytes().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.AggregateRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequestOrBuilder {
+
+ private int bitField0_;
+
+ // required string interpreter_class_name = 1;
+ private java.lang.Object interpreterClassName_ = "";
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ public boolean hasInterpreterClassName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ public java.lang.String getInterpreterClassName() {
+ java.lang.Object ref = interpreterClassName_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ interpreterClassName_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ public com.google.protobuf.ByteString
+ getInterpreterClassNameBytes() {
+ java.lang.Object ref = interpreterClassName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ interpreterClassName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ public Builder setInterpreterClassName(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ interpreterClassName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ public Builder clearInterpreterClassName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ interpreterClassName_ = getDefaultInstance().getInterpreterClassName();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string interpreter_class_name = 1;
+ *
+ * The request passed to the AggregateService consists of three parts
+ * (1) the (canonical) classname of the ColumnInterpreter implementation
+ * (2) the Scan query
+ * (3) any bytes required to construct the ColumnInterpreter object
+ * properly
+ */
+ public Builder setInterpreterClassNameBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ interpreterClassName_ = value;
+ onChanged();
+ return this;
+ }
+
+ // required .hbase.pb.Scan scan = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public boolean hasScan() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+ if (scanBuilder_ == null) {
+ return scan_;
+ } else {
+ return scanBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+ if (scanBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ scan_ = value;
+ onChanged();
+ } else {
+ scanBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public Builder setScan(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
+ if (scanBuilder_ == null) {
+ scan_ = builderForValue.build();
+ onChanged();
+ } else {
+ scanBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+ if (scanBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
+ scan_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
+ } else {
+ scan_ = value;
+ }
+ onChanged();
+ } else {
+ scanBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public Builder clearScan() {
+ if (scanBuilder_ == null) {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ onChanged();
+ } else {
+ scanBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getScanFieldBuilder().getBuilder();
+ }
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+ if (scanBuilder_ != null) {
+ return scanBuilder_.getMessageOrBuilder();
+ } else {
+ return scan_;
+ }
+ }
+ /**
+ * required .hbase.pb.Scan scan = 2;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
+ getScanFieldBuilder() {
+ if (scanBuilder_ == null) {
+ scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
+ scan_,
+ getParentForChildren(),
+ isClean());
+ scan_ = null;
+ }
+ return scanBuilder_;
+ }
+
+ // optional bytes interpreter_specific_bytes = 3;
+ private com.google.protobuf.ByteString interpreterSpecificBytes_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * optional bytes interpreter_specific_bytes = 3;
+ */
+ public boolean hasInterpreterSpecificBytes() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * optional bytes interpreter_specific_bytes = 3;
+ */
+ public com.google.protobuf.ByteString getInterpreterSpecificBytes() {
+ return interpreterSpecificBytes_;
+ }
+ /**
+ * optional bytes interpreter_specific_bytes = 3;
+ */
+ public Builder setInterpreterSpecificBytes(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000004;
+ interpreterSpecificBytes_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bytes interpreter_specific_bytes = 3;
+ */
+ public Builder clearInterpreterSpecificBytes() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ interpreterSpecificBytes_ = getDefaultInstance().getInterpreterSpecificBytes();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.AggregateRequest)
+ }
+
+ static {
+ defaultInstance = new AggregateRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.AggregateRequest)
+ }
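
The field comments above describe the three parts of an AggregateRequest; the following sketch (not part of the patch) shows how a client might assemble one. The interpreter class name shown is HBase's LongColumnInterpreter, and the empty Scan is purely illustrative.

import org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan;

public class AggregateRequestDemo {
  public static void main(String[] args) {
    // (1) the ColumnInterpreter implementation, (2) the Scan, (3) optional
    // interpreter-specific bytes (omitted here since the field is optional).
    AggregateRequest request = AggregateRequest.newBuilder()
        .setInterpreterClassName(
            "org.apache.hadoop.hbase.client.coprocessor.LongColumnInterpreter")
        .setScan(Scan.newBuilder().build())
        .build();
    // Both required fields are set, so the message reports itself initialized.
    System.out.println(request.isInitialized());
  }
}
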
+
+ public interface AggregateResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated bytes first_part = 1;
+ /**
+ * repeated bytes first_part = 1;
+ *
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ */
+ java.util.List<com.google.protobuf.ByteString> getFirstPartList();
+ /**
+ * repeated bytes first_part = 1;
+ *
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ */
+ int getFirstPartCount();
+ /**
+ * repeated bytes first_part = 1;
+ *
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ */
+ com.google.protobuf.ByteString getFirstPart(int index);
+
+ // optional bytes second_part = 2;
+ /**
+ * optional bytes second_part = 2;
+ */
+ boolean hasSecondPart();
+ /**
+ * optional bytes second_part = 2;
+ */
+ com.google.protobuf.ByteString getSecondPart();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.AggregateResponse}
+ */
+ public static final class AggregateResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements AggregateResponseOrBuilder {
+ // Use AggregateResponse.newBuilder() to construct.
+ private AggregateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private AggregateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final AggregateResponse defaultInstance;
+ public static AggregateResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AggregateResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private AggregateResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ firstPart_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ firstPart_.add(input.readBytes());
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000001;
+ secondPart_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ firstPart_ = java.util.Collections.unmodifiableList(firstPart_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+
+ private int bitField0_;
+ // repeated bytes first_part = 1;
+ public static final int FIRST_PART_FIELD_NUMBER = 1;
+ private java.util.List<com.google.protobuf.ByteString> firstPart_;
+ /**
+ * repeated bytes first_part = 1;
+ *
+ *
+ **
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ *
+ */
+ public java.util.List<com.google.protobuf.ByteString>
+ getFirstPartList() {
+ return firstPart_;
+ }
+ /**
+ * repeated bytes first_part = 1;
+ *
+ *
+ **
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ *
+ */
+ public int getFirstPartCount() {
+ return firstPart_.size();
+ }
+ /**
+ * repeated bytes first_part = 1;
+ *
+ *
+ **
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ *
+ */
+ public com.google.protobuf.ByteString getFirstPart(int index) {
+ return firstPart_.get(index);
+ }
+
+ // optional bytes second_part = 2;
+ public static final int SECOND_PART_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString secondPart_;
+ /**
+ * optional bytes second_part = 2;
+ */
+ public boolean hasSecondPart() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional bytes second_part = 2;
+ */
+ public com.google.protobuf.ByteString getSecondPart() {
+ return secondPart_;
+ }
+
+ private void initFields() {
+ firstPart_ = java.util.Collections.emptyList();
+ secondPart_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < firstPart_.size(); i++) {
+ output.writeBytes(1, firstPart_.get(i));
+ }
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(2, secondPart_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ {
+ int dataSize = 0;
+ for (int i = 0; i < firstPart_.size(); i++) {
+ dataSize += com.google.protobuf.CodedOutputStream
+ .computeBytesSizeNoTag(firstPart_.get(i));
+ }
+ size += dataSize;
+ size += 1 * getFirstPartList().size();
+ }
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, secondPart_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse other = (org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse) obj;
+
+ boolean result = true;
+ result = result && getFirstPartList()
+ .equals(other.getFirstPartList());
+ result = result && (hasSecondPart() == other.hasSecondPart());
+ if (hasSecondPart()) {
+ result = result && getSecondPart()
+ .equals(other.getSecondPart());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getFirstPartCount() > 0) {
+ hash = (37 * hash) + FIRST_PART_FIELD_NUMBER;
+ hash = (53 * hash) + getFirstPartList().hashCode();
+ }
+ if (hasSecondPart()) {
+ hash = (37 * hash) + SECOND_PART_FIELD_NUMBER;
+ hash = (53 * hash) + getSecondPart().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.AggregateResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponseOrBuilder {
+
+ private int bitField0_;
+
+ // repeated bytes first_part = 1;
+ private java.util.List<com.google.protobuf.ByteString> firstPart_ = java.util.Collections.emptyList();
+ private void ensureFirstPartIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ firstPart_ = new java.util.ArrayList<com.google.protobuf.ByteString>(firstPart_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ /**
+ * repeated bytes first_part = 1;
+ *
+ *
+ **
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ *
+ */
+ public java.util.List<com.google.protobuf.ByteString>
+ getFirstPartList() {
+ return java.util.Collections.unmodifiableList(firstPart_);
+ }
+ /**
+ * repeated bytes first_part = 1;
+ *
+ *
+ **
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ *
+ */
+ public int getFirstPartCount() {
+ return firstPart_.size();
+ }
+ /**
+ * repeated bytes first_part = 1;
+ *
+ *
+ **
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ *
+ */
+ public com.google.protobuf.ByteString getFirstPart(int index) {
+ return firstPart_.get(index);
+ }
+ /**
+ * repeated bytes first_part = 1;
+ *
+ *
+ **
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ *
+ */
+ public Builder setFirstPart(
+ int index, com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFirstPartIsMutable();
+ firstPart_.set(index, value);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated bytes first_part = 1;
+ *
+ *
+ **
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ *
+ */
+ public Builder addFirstPart(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFirstPartIsMutable();
+ firstPart_.add(value);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated bytes first_part = 1;
+ *
+ *
+ **
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ *
+ */
+ public Builder addAllFirstPart(
+ java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
+ ensureFirstPartIsMutable();
+ super.addAll(values, firstPart_);
+ onChanged();
+ return this;
+ }
+ /**
+ * repeated bytes first_part = 1;
+ *
+ *
+ **
+ * The AggregateService methods all have a response that either is a Pair
+ * or a simple object. When it is a Pair both first_part and second_part
+ * have defined values (and the second_part is not present in the response
+ * when the response is not a pair). Refer to the AggregateImplementation
+ * class for an overview of the AggregateResponse object constructions.
+ *
+ */
+ public Builder clearFirstPart() {
+ firstPart_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ return this;
+ }
+
+ // optional bytes second_part = 2;
+ private com.google.protobuf.ByteString secondPart_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * optional bytes second_part = 2;
+ */
+ public boolean hasSecondPart() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional bytes second_part = 2;
+ */
+ public com.google.protobuf.ByteString getSecondPart() {
+ return secondPart_;
+ }
+ /**
+ * optional bytes second_part = 2;
+ */
+ public Builder setSecondPart(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ secondPart_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bytes second_part = 2;
+ */
+ public Builder clearSecondPart() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ secondPart_ = getDefaultInstance().getSecondPart();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.AggregateResponse)
+ }
+
+ static {
+ defaultInstance = new AggregateResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.AggregateResponse)
+ }
+
+ /**
+ * Protobuf service {@code hbase.pb.AggregateService}
+ *
+ *
+ ** Refer to the AggregateImplementation class for an overview of the
+ * AggregateService method implementations and their functionality.
+ *
+ */
+ public static abstract class AggregateService
+ implements com.google.protobuf.Service {
+ protected AggregateService() {}
+
+ public interface Interface {
+ /**
+ * rpc GetMax(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getMax(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetMin(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getMin(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetSum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getSum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetRowNum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getRowNum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetAvg(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getAvg(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetStd(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getStd(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetMedian(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getMedian(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ }
+
+ /**
+ * rpc GetMax(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getMax(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetMin(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getMin(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetSum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getSum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetRowNum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getRowNum(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetAvg(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getAvg(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetStd(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getStd(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ /**
+ * rpc GetMedian(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);
+ */
+ public abstract void getMedian(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
+
+ }
+
+ public interface SecureBulkLoadHFilesRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>
+ getFamilyPathList();
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index);
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ int getFamilyPathCount();
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
+ getFamilyPathOrBuilderList();
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
+ int index);
+
+ // optional bool assign_seq_num = 2;
+ /**
+ * optional bool assign_seq_num = 2;
+ */
+ boolean hasAssignSeqNum();
+ /**
+ * optional bool assign_seq_num = 2;
+ */
+ boolean getAssignSeqNum();
+
+ // required .hbase.pb.DelegationToken fs_token = 3;
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ boolean hasFsToken();
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken();
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder();
+
+ // required string bulk_token = 4;
+ /**
+ * required string bulk_token = 4;
+ */
+ boolean hasBulkToken();
+ /**
+ * required string bulk_token = 4;
+ */
+ java.lang.String getBulkToken();
+ /**
+ * required string bulk_token = 4;
+ */
+ com.google.protobuf.ByteString
+ getBulkTokenBytes();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesRequest}
+ */
+ public static final class SecureBulkLoadHFilesRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements SecureBulkLoadHFilesRequestOrBuilder {
+ // Use SecureBulkLoadHFilesRequest.newBuilder() to construct.
+ private SecureBulkLoadHFilesRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SecureBulkLoadHFilesRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SecureBulkLoadHFilesRequest defaultInstance;
+ public static SecureBulkLoadHFilesRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SecureBulkLoadHFilesRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SecureBulkLoadHFilesRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>();
+ mutable_bitField0_ |= 0x00000001;
+ }
+ familyPath_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.PARSER, extensionRegistry));
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000001;
+ assignSeqNum_ = input.readBool();
+ break;
+ }
+ case 26: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ subBuilder = fsToken_.toBuilder();
+ }
+ fsToken_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(fsToken_);
+ fsToken_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000002;
+ break;
+ }
+ case 34: {
+ bitField0_ |= 0x00000004;
+ bulkToken_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
+ familyPath_ = java.util.Collections.unmodifiableList(familyPath_);
+ }
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+
+ private int bitField0_;
+ // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ public static final int FAMILY_PATH_FIELD_NUMBER = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_;
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
+ return familyPath_;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
+ getFamilyPathOrBuilderList() {
+ return familyPath_;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public int getFamilyPathCount() {
+ return familyPath_.size();
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
+ return familyPath_.get(index);
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
+ int index) {
+ return familyPath_.get(index);
+ }
+
+ // optional bool assign_seq_num = 2;
+ public static final int ASSIGN_SEQ_NUM_FIELD_NUMBER = 2;
+ private boolean assignSeqNum_;
+ /**
+ * optional bool assign_seq_num = 2;
+ */
+ public boolean hasAssignSeqNum() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional bool assign_seq_num = 2;
+ */
+ public boolean getAssignSeqNum() {
+ return assignSeqNum_;
+ }
+
+ // required .hbase.pb.DelegationToken fs_token = 3;
+ public static final int FS_TOKEN_FIELD_NUMBER = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_;
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public boolean hasFsToken() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() {
+ return fsToken_;
+ }
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() {
+ return fsToken_;
+ }
+
+ // required string bulk_token = 4;
+ public static final int BULK_TOKEN_FIELD_NUMBER = 4;
+ private java.lang.Object bulkToken_;
+ /**
+ * required string bulk_token = 4;
+ */
+ public boolean hasBulkToken() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * required string bulk_token = 4;
+ */
+ public java.lang.String getBulkToken() {
+ java.lang.Object ref = bulkToken_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ if (bs.isValidUtf8()) {
+ bulkToken_ = s;
+ }
+ return s;
+ }
+ }
+ /**
+ * required string bulk_token = 4;
+ */
+ public com.google.protobuf.ByteString
+ getBulkTokenBytes() {
+ java.lang.Object ref = bulkToken_;
+ if (ref instanceof java.lang.String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ bulkToken_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ familyPath_ = java.util.Collections.emptyList();
+ assignSeqNum_ = false;
+ fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
+ bulkToken_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasFsToken()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasBulkToken()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ for (int i = 0; i < getFamilyPathCount(); i++) {
+ if (!getFamilyPath(i).isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ for (int i = 0; i < familyPath_.size(); i++) {
+ output.writeMessage(1, familyPath_.get(i));
+ }
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBool(2, assignSeqNum_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(3, fsToken_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBytes(4, getBulkTokenBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ for (int i = 0; i < familyPath_.size(); i++) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, familyPath_.get(i));
+ }
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(2, assignSeqNum_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, fsToken_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(4, getBulkTokenBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest) obj;
+
+ boolean result = true;
+ result = result && getFamilyPathList()
+ .equals(other.getFamilyPathList());
+ result = result && (hasAssignSeqNum() == other.hasAssignSeqNum());
+ if (hasAssignSeqNum()) {
+ result = result && (getAssignSeqNum()
+ == other.getAssignSeqNum());
+ }
+ result = result && (hasFsToken() == other.hasFsToken());
+ if (hasFsToken()) {
+ result = result && getFsToken()
+ .equals(other.getFsToken());
+ }
+ result = result && (hasBulkToken() == other.hasBulkToken());
+ if (hasBulkToken()) {
+ result = result && getBulkToken()
+ .equals(other.getBulkToken());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (getFamilyPathCount() > 0) {
+ hash = (37 * hash) + FAMILY_PATH_FIELD_NUMBER;
+ hash = (53 * hash) + getFamilyPathList().hashCode();
+ }
+ if (hasAssignSeqNum()) {
+ hash = (37 * hash) + ASSIGN_SEQ_NUM_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getAssignSeqNum());
+ }
+ if (hasFsToken()) {
+ hash = (37 * hash) + FS_TOKEN_FIELD_NUMBER;
+ hash = (53 * hash) + getFsToken().hashCode();
+ }
+ if (hasBulkToken()) {
+ hash = (37 * hash) + BULK_TOKEN_FIELD_NUMBER;
+ hash = (53 * hash) + getBulkToken().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequestOrBuilder {
+
+ private int bitField0_;
+
+ // repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> familyPath_ =
+ java.util.Collections.emptyList();
+ private void ensureFamilyPathIsMutable() {
+ if (!((bitField0_ & 0x00000001) == 0x00000001)) {
+ familyPath_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath>(familyPath_);
+ bitField0_ |= 0x00000001;
+ }
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder> familyPathBuilder_;
+
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> getFamilyPathList() {
+ if (familyPathBuilder_ == null) {
+ return java.util.Collections.unmodifiableList(familyPath_);
+ } else {
+ return familyPathBuilder_.getMessageList();
+ }
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public int getFamilyPathCount() {
+ if (familyPathBuilder_ == null) {
+ return familyPath_.size();
+ } else {
+ return familyPathBuilder_.getCount();
+ }
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath getFamilyPath(int index) {
+ if (familyPathBuilder_ == null) {
+ return familyPath_.get(index);
+ } else {
+ return familyPathBuilder_.getMessage(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public Builder setFamilyPath(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
+ if (familyPathBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFamilyPathIsMutable();
+ familyPath_.set(index, value);
+ onChanged();
+ } else {
+ familyPathBuilder_.setMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public Builder setFamilyPath(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
+ if (familyPathBuilder_ == null) {
+ ensureFamilyPathIsMutable();
+ familyPath_.set(index, builderForValue.build());
+ onChanged();
+ } else {
+ familyPathBuilder_.setMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public Builder addFamilyPath(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
+ if (familyPathBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFamilyPathIsMutable();
+ familyPath_.add(value);
+ onChanged();
+ } else {
+ familyPathBuilder_.addMessage(value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public Builder addFamilyPath(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath value) {
+ if (familyPathBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ ensureFamilyPathIsMutable();
+ familyPath_.add(index, value);
+ onChanged();
+ } else {
+ familyPathBuilder_.addMessage(index, value);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public Builder addFamilyPath(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
+ if (familyPathBuilder_ == null) {
+ ensureFamilyPathIsMutable();
+ familyPath_.add(builderForValue.build());
+ onChanged();
+ } else {
+ familyPathBuilder_.addMessage(builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public Builder addFamilyPath(
+ int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder builderForValue) {
+ if (familyPathBuilder_ == null) {
+ ensureFamilyPathIsMutable();
+ familyPath_.add(index, builderForValue.build());
+ onChanged();
+ } else {
+ familyPathBuilder_.addMessage(index, builderForValue.build());
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public Builder addAllFamilyPath(
+ java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath> values) {
+ if (familyPathBuilder_ == null) {
+ ensureFamilyPathIsMutable();
+ super.addAll(values, familyPath_);
+ onChanged();
+ } else {
+ familyPathBuilder_.addAllMessages(values);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public Builder clearFamilyPath() {
+ if (familyPathBuilder_ == null) {
+ familyPath_ = java.util.Collections.emptyList();
+ bitField0_ = (bitField0_ & ~0x00000001);
+ onChanged();
+ } else {
+ familyPathBuilder_.clear();
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public Builder removeFamilyPath(int index) {
+ if (familyPathBuilder_ == null) {
+ ensureFamilyPathIsMutable();
+ familyPath_.remove(index);
+ onChanged();
+ } else {
+ familyPathBuilder_.remove(index);
+ }
+ return this;
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder getFamilyPathBuilder(
+ int index) {
+ return getFamilyPathFieldBuilder().getBuilder(index);
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder getFamilyPathOrBuilder(
+ int index) {
+ if (familyPathBuilder_ == null) {
+ return familyPath_.get(index); } else {
+ return familyPathBuilder_.getMessageOrBuilder(index);
+ }
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
+ getFamilyPathOrBuilderList() {
+ if (familyPathBuilder_ != null) {
+ return familyPathBuilder_.getMessageOrBuilderList();
+ } else {
+ return java.util.Collections.unmodifiableList(familyPath_);
+ }
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder() {
+ return getFamilyPathFieldBuilder().addBuilder(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder addFamilyPathBuilder(
+ int index) {
+ return getFamilyPathFieldBuilder().addBuilder(
+ index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.getDefaultInstance());
+ }
+ /**
+ * repeated .hbase.pb.BulkLoadHFileRequest.FamilyPath family_path = 1;
+ */
+ public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder>
+ getFamilyPathBuilderList() {
+ return getFamilyPathFieldBuilder().getBuilderList();
+ }
+ private com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>
+ getFamilyPathFieldBuilder() {
+ if (familyPathBuilder_ == null) {
+ familyPathBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPath.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.FamilyPathOrBuilder>(
+ familyPath_,
+ ((bitField0_ & 0x00000001) == 0x00000001),
+ getParentForChildren(),
+ isClean());
+ familyPath_ = null;
+ }
+ return familyPathBuilder_;
+ }
+
+ // optional bool assign_seq_num = 2;
+ private boolean assignSeqNum_;
+ /**
+ * optional bool assign_seq_num = 2;
+ */
+ public boolean hasAssignSeqNum() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional bool assign_seq_num = 2;
+ */
+ public boolean getAssignSeqNum() {
+ return assignSeqNum_;
+ }
+ /**
+ * optional bool assign_seq_num = 2;
+ */
+ public Builder setAssignSeqNum(boolean value) {
+ bitField0_ |= 0x00000002;
+ assignSeqNum_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional bool assign_seq_num = 2;
+ */
+ public Builder clearAssignSeqNum() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ assignSeqNum_ = false;
+ onChanged();
+ return this;
+ }
+
+ // required .hbase.pb.DelegationToken fs_token = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder> fsTokenBuilder_;
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public boolean hasFsToken() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken getFsToken() {
+ if (fsTokenBuilder_ == null) {
+ return fsToken_;
+ } else {
+ return fsTokenBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public Builder setFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) {
+ if (fsTokenBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ fsToken_ = value;
+ onChanged();
+ } else {
+ fsTokenBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public Builder setFsToken(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder builderForValue) {
+ if (fsTokenBuilder_ == null) {
+ fsToken_ = builderForValue.build();
+ onChanged();
+ } else {
+ fsTokenBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public Builder mergeFsToken(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken value) {
+ if (fsTokenBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004) &&
+ fsToken_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance()) {
+ fsToken_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.newBuilder(fsToken_).mergeFrom(value).buildPartial();
+ } else {
+ fsToken_ = value;
+ }
+ onChanged();
+ } else {
+ fsTokenBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public Builder clearFsToken() {
+ if (fsTokenBuilder_ == null) {
+ fsToken_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.getDefaultInstance();
+ onChanged();
+ } else {
+ fsTokenBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder getFsTokenBuilder() {
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return getFsTokenFieldBuilder().getBuilder();
+ }
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder getFsTokenOrBuilder() {
+ if (fsTokenBuilder_ != null) {
+ return fsTokenBuilder_.getMessageOrBuilder();
+ } else {
+ return fsToken_;
+ }
+ }
+ /**
+ * required .hbase.pb.DelegationToken fs_token = 3;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>
+ getFsTokenFieldBuilder() {
+ if (fsTokenBuilder_ == null) {
+ fsTokenBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationToken.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.DelegationTokenOrBuilder>(
+ fsToken_,
+ getParentForChildren(),
+ isClean());
+ fsToken_ = null;
+ }
+ return fsTokenBuilder_;
+ }
+
+ // required string bulk_token = 4;
+ private java.lang.Object bulkToken_ = "";
+ /**
+ * required string bulk_token = 4;
+ */
+ public boolean hasBulkToken() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * required string bulk_token = 4;
+ */
+ public java.lang.String getBulkToken() {
+ java.lang.Object ref = bulkToken_;
+ if (!(ref instanceof java.lang.String)) {
+ java.lang.String s = ((com.google.protobuf.ByteString) ref)
+ .toStringUtf8();
+ bulkToken_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ * required string bulk_token = 4;
+ */
+ public com.google.protobuf.ByteString
+ getBulkTokenBytes() {
+ java.lang.Object ref = bulkToken_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ bulkToken_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ * required string bulk_token = 4;
+ */
+ public Builder setBulkToken(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ bulkToken_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required string bulk_token = 4;
+ */
+ public Builder clearBulkToken() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ bulkToken_ = getDefaultInstance().getBulkToken();
+ onChanged();
+ return this;
+ }
+ /**
+ * required string bulk_token = 4;
+ */
+ public Builder setBulkTokenBytes(
+ com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000008;
+ bulkToken_ = value;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.SecureBulkLoadHFilesRequest)
+ }
+
+ static {
+ defaultInstance = new SecureBulkLoadHFilesRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.SecureBulkLoadHFilesRequest)
+ }
+
+ public interface SecureBulkLoadHFilesResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required bool loaded = 1;
+ /**
+ * required bool loaded = 1;
+ */
+ boolean hasLoaded();
+ /**
+ * required bool loaded = 1;
+ */
+ boolean getLoaded();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesResponse}
+ */
+ public static final class SecureBulkLoadHFilesResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements SecureBulkLoadHFilesResponseOrBuilder {
+ // Use SecureBulkLoadHFilesResponse.newBuilder() to construct.
+ private SecureBulkLoadHFilesResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private SecureBulkLoadHFilesResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final SecureBulkLoadHFilesResponse defaultInstance;
+ public static SecureBulkLoadHFilesResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public SecureBulkLoadHFilesResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private SecureBulkLoadHFilesResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ loaded_ = input.readBool();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.internal_static_hbase_pb_SecureBulkLoadHFilesResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.class, org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<SecureBulkLoadHFilesResponse> PARSER =
+ new com.google.protobuf.AbstractParser<SecureBulkLoadHFilesResponse>() {
+ public SecureBulkLoadHFilesResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new SecureBulkLoadHFilesResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<SecureBulkLoadHFilesResponse> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required bool loaded = 1;
+ public static final int LOADED_FIELD_NUMBER = 1;
+ private boolean loaded_;
+ /**
+ * required bool loaded = 1;
+ */
+ public boolean hasLoaded() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bool loaded = 1;
+ */
+ public boolean getLoaded() {
+ return loaded_;
+ }
+
+ private void initFields() {
+ loaded_ = false;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasLoaded()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBool(1, loaded_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBoolSize(1, loaded_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse other = (org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse) obj;
+
+ boolean result = true;
+ result = result && (hasLoaded() == other.hasLoaded());
+ if (hasLoaded()) {
+ result = result && (getLoaded()
+ == other.getLoaded());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasLoaded()) {
+ hash = (37 * hash) + LOADED_FIELD_NUMBER;
+ hash = (53 * hash) + hashBoolean(getLoaded());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.SecureBulkLoadHFilesResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponseOrBuilder {
+
+ private int bitField0_;
+
+ // required bool loaded = 1;
+ private boolean loaded_;
+ /**
+ * required bool loaded = 1;
+ */
+ public boolean hasLoaded() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required bool loaded = 1;
+ */
+ public boolean getLoaded() {
+ return loaded_;
+ }
+ /**
+ * required bool loaded = 1;
+ */
+ public Builder setLoaded(boolean value) {
+ bitField0_ |= 0x00000001;
+ loaded_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bool loaded = 1;
+ */
+ public Builder clearLoaded() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ loaded_ = false;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.SecureBulkLoadHFilesResponse)
+ }
+
+ static {
+ defaultInstance = new SecureBulkLoadHFilesResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.SecureBulkLoadHFilesResponse)
+ }
+
+ /**
+ * Protobuf service {@code hbase.pb.SecureBulkLoadService}
+ */
+ public static abstract class SecureBulkLoadService
+ implements com.google.protobuf.Service {
+ protected SecureBulkLoadService() {}
+
+ public interface Interface {
+ /**
+ * rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse);
+ */
+ public abstract void prepareBulkLoad(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done);
+
+ /**
+ * rpc SecureBulkLoadHFiles(.hbase.pb.SecureBulkLoadHFilesRequest) returns (.hbase.pb.SecureBulkLoadHFilesResponse);
+ */
+ public abstract void secureBulkLoadHFiles(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse> done);
+
+ /**
+ * rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse);
+ */
+ public abstract void cleanupBulkLoad(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done);
+ }
+
+ /**
+ * rpc PrepareBulkLoad(.hbase.pb.PrepareBulkLoadRequest) returns (.hbase.pb.PrepareBulkLoadResponse);
+ */
+ public abstract void prepareBulkLoad(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.PrepareBulkLoadResponse> done);
+
+ /**
+ * rpc SecureBulkLoadHFiles(.hbase.pb.SecureBulkLoadHFilesRequest) returns (.hbase.pb.SecureBulkLoadHFilesResponse);
+ */
+ public abstract void secureBulkLoadHFiles(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.SecureBulkLoadHFilesResponse> done);
+
+ /**
+ * rpc CleanupBulkLoad(.hbase.pb.CleanupBulkLoadRequest) returns (.hbase.pb.CleanupBulkLoadResponse);
+ */
+ public abstract void cleanupBulkLoad(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CleanupBulkLoadResponse> done);
+
+ public interface CountResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required int64 count = 1 [default = 0];
+ /**
+ * required int64 count = 1 [default = 0];
+ */
+ boolean hasCount();
+ /**
+ * required int64 count = 1 [default = 0];
+ */
+ long getCount();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.CountResponse}
+ */
+ public static final class CountResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements CountResponseOrBuilder {
+ // Use CountResponse.newBuilder() to construct.
+ private CountResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private CountResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final CountResponse defaultInstance;
+ public static CountResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public CountResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private CountResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ count_ = input.readInt64();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<CountResponse> PARSER =
+ new com.google.protobuf.AbstractParser<CountResponse>() {
+ public CountResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new CountResponse(input, extensionRegistry);
+ }
+ };
+
+ private int bitField0_;
+ // required int64 count = 1 [default = 0];
+ public static final int COUNT_FIELD_NUMBER = 1;
+ private long count_;
+ /**
+ * required int64 count = 1 [default = 0];
+ */
+ public boolean hasCount() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required int64 count = 1 [default = 0];
+ */
+ public long getCount() {
+ return count_;
+ }
+
+ private void initFields() {
+ count_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasCount()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeInt64(1, count_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeInt64Size(1, count_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse) obj;
+
+ boolean result = true;
+ result = result && (hasCount() == other.hasCount());
+ if (hasCount()) {
+ result = result && (getCount()
+ == other.getCount());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasCount()) {
+ hash = (37 * hash) + COUNT_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getCount());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.CountResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponseOrBuilder {
+ private int bitField0_;
+
+ // required int64 count = 1 [default = 0];
+ private long count_ ;
+ /**
+ * required int64 count = 1 [default = 0];
+ */
+ public boolean hasCount() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required int64 count = 1 [default = 0];
+ */
+ public long getCount() {
+ return count_;
+ }
+ /**
+ * required int64 count = 1 [default = 0];
+ */
+ public Builder setCount(long value) {
+ bitField0_ |= 0x00000001;
+ count_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required int64 count = 1 [default = 0];
+ */
+ public Builder clearCount() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ count_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.CountResponse)
+ }
+
+ static {
+ defaultInstance = new CountResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.CountResponse)
+ }
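+
+ // Illustrative sketch, not generated code: `count` is a required field, so build()
+ // refuses an unset count while buildPartial() still returns an uninitialized message.
+ //
+ //   CountResponse ok = CountResponse.newBuilder().setCount(42L).build();
+ //   CountResponse partial = CountResponse.newBuilder().buildPartial();
+ //   // ok.isInitialized() == true,  ok.getCount() == 42L
+ //   // partial.isInitialized() == false, partial.hasCount() == false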
+
+ /**
+ * Protobuf service {@code hbase.pb.RowCountService}
+ */
+ public static abstract class RowCountService
+ implements com.google.protobuf.Service {
+ protected RowCountService() {}
+
+ public interface Interface {
+ /**
+ * rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);
+ */
+ public abstract void getRowCount(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
+
+ /**
+ * rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);
+ */
+ public abstract void getKeyValueCount(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
+ }
+
+ /**
+ * rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);
+ */
+ public abstract void getRowCount(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
+
+ /**
+ * rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);
+ */
+ public abstract void getKeyValueCount(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
+
+ /**
+ * This implementation assumes all subclasses are array based, supporting random access.
+ */
+abstract class AbstractProtobufList<E>
+
+/**
+ * `Any` contains an arbitrary serialized protocol buffer message along with a
+ * URL that describes the type of the serialized message.
+ * Protobuf library provides support to pack/unpack Any values in the form
+ * of utility functions or additional generated methods of the Any type.
+ * Example 1: Pack and unpack a message in C++.
+ * Foo foo = ...;
+ * Any any;
+ * any.PackFrom(foo);
+ * ...
+ * if (any.UnpackTo(&foo)) {
+ * ...
+ * }
+ * Example 2: Pack and unpack a message in Java.
+ * Foo foo = ...;
+ * Any any = Any.pack(foo);
+ * ...
+ * if (any.is(Foo.class)) {
+ * foo = any.unpack(Foo.class);
+ * }
+ * Example 3: Pack and unpack a message in Python.
+ * foo = Foo(...)
+ * any = Any()
+ * any.Pack(foo)
+ * ...
+ * if any.Is(Foo.DESCRIPTOR):
+ * any.Unpack(foo)
+ * ...
+ * The pack methods provided by protobuf library will by default use
+ * 'type.googleapis.com/full.type.name' as the type URL and the unpack
+ * methods only use the fully qualified type name after the last '/'
+ * in the type URL, for example "foo.bar.com/x/y.z" will yield type
+ * name "y.z".
+ * JSON
+ * ====
+ * The JSON representation of an `Any` value uses the regular
+ * representation of the deserialized, embedded message, with an
+ * additional field `@type` which contains the type URL. Example:
+ * package google.profile;
+ * message Person {
+ * string first_name = 1;
+ * string last_name = 2;
+ * }
+ * {
+ * "@type": "type.googleapis.com/google.profile.Person",
+ * "firstName": <string>,
+ * "lastName": <string>
+ * }
+ * If the embedded message type is well-known and has a custom JSON
+ * representation, that representation will be embedded adding a field
+ * `value` which holds the custom JSON in addition to the `@type`
+ * field. Example (for message [google.protobuf.Duration][]):
+ * {
+ * "@type": "type.googleapis.org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration",
+ * "value": "1.212s"
+ * }
+ *
+ *
+ * Protobuf type {@code google.protobuf.Any}
+ */
+public final class Any extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements
+ // @@protoc_insertion_point(message_implements:google.protobuf.Any)
+ AnyOrBuilder {
+ // Use Any.newBuilder() to construct.
+ private Any(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
+ super(builder);
+ }
+ private Any() {
+ typeUrl_ = "";
+ value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+ }
+
+ @java.lang.Override
+ public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.getDefaultInstance();
+ }
+ private Any(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ this();
+ int mutable_bitField0_ = 0;
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!input.skipField(tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ java.lang.String s = input.readStringRequireUtf8();
+
+ typeUrl_ = s;
+ break;
+ }
+ case 18: {
+
+ value_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException(
+ e).setUnfinishedMessage(this);
+ } finally {
+ makeExtensionsImmutable();
+ }
+ }
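+
+ // Note on the case labels in the parsing loop above (standard protobuf wire-format
+ // arithmetic, not specific to this file): a tag is (field_number << 3) | wire_type,
+ // and both fields of Any are length-delimited (wire type 2), so
+ //   type_url (field 1): (1 << 3) | 2 = 10
+ //   value    (field 2): (2 << 3) | 2 = 18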
+ public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_descriptor;
+ }
+
+ protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyProto.internal_static_google_protobuf_Any_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.class, org.apache.hadoop.hbase.shaded.com.google.protobuf.Any.Builder.class);
+ }
+
+ private static String getTypeUrl(
+ java.lang.String typeUrlPrefix,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor descriptor) {
+ return typeUrlPrefix.endsWith("/")
+ ? typeUrlPrefix + descriptor.getFullName()
+ : typeUrlPrefix + "/" + descriptor.getFullName();
+ }
+
+ private static String getTypeNameFromTypeUrl(
+ java.lang.String typeUrl) {
+ int pos = typeUrl.lastIndexOf('/');
+ return pos == -1 ? "" : typeUrl.substring(pos + 1);
+ }
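+
+ // Worked example for the two helpers above (inputs are illustrative only):
+ //   getTypeUrl("type.googleapis.com", descriptor) -> "type.googleapis.com/" + descriptor.getFullName()
+ //   getTypeNameFromTypeUrl("foo.bar.com/x/y.z")   -> "y.z"
+ //   getTypeNameFromTypeUrl("no-slash")            -> ""   (lastIndexOf('/') == -1)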
+
+ public static final int TYPE_URL_FIELD_NUMBER = 1;
+ private volatile java.lang.Object typeUrl_;
+ /**
+ *
+ * A URL/resource name whose content describes the type of the
+ * serialized protocol buffer message.
+ * For URLs which use the scheme `http`, `https`, or no scheme, the
+ * following restrictions and interpretations apply:
+ * * If no scheme is provided, `https` is assumed.
+ * * The last segment of the URL's path must represent the fully
+ * qualified name of the type (as in `path/google.protobuf.Duration`).
+ * The name should be in a canonical form (e.g., leading "." is
+ * not accepted).
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ * value in binary format, or produce an error.
+ * * Applications are allowed to cache lookup results based on the
+ * URL, or have them precompiled into a binary to avoid any
+ * lookup. Therefore, binary compatibility needs to be preserved
+ * on changes to types. (Use versioned type names to manage
+ * breaking changes.)
+ * Schemes other than `http`, `https` (or the empty scheme) might be
+ * used with implementation specific semantics.
+ *
+ *
+ * string type_url = 1;
+ */
+ public java.lang.String getTypeUrl() {
+ java.lang.Object ref = typeUrl_;
+ if (ref instanceof java.lang.String) {
+ return (java.lang.String) ref;
+ } else {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ typeUrl_ = s;
+ return s;
+ }
+ }
+ /**
+ *
+ * A URL/resource name whose content describes the type of the
+ * serialized protocol buffer message.
+ * For URLs which use the scheme `http`, `https`, or no scheme, the
+ * following restrictions and interpretations apply:
+ * * If no scheme is provided, `https` is assumed.
+ * * The last segment of the URL's path must represent the fully
+ * qualified name of the type (as in `path/google.protobuf.Duration`).
+ * The name should be in a canonical form (e.g., leading "." is
+ * not accepted).
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ * value in binary format, or produce an error.
+ * * Applications are allowed to cache lookup results based on the
+ * URL, or have them precompiled into a binary to avoid any
+ * lookup. Therefore, binary compatibility needs to be preserved
+ * on changes to types. (Use versioned type names to manage
+ * breaking changes.)
+ * Schemes other than `http`, `https` (or the empty scheme) might be
+ * used with implementation specific semantics.
+ *
+ *
+ * string type_url = 1;
+ */
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+ getTypeUrlBytes() {
+ java.lang.Object ref = typeUrl_;
+ if (ref instanceof java.lang.String) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ typeUrl_ = b;
+ return b;
+ } else {
+ return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ public static final int VALUE_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_;
+ /**
+ *
+ * Must be a valid serialized protocol buffer of the above specified type.
+ *
+ *
+ * bytes value = 2;
+ */
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() {
+ return value_;
+ }
+
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized == 1) return true;
+ if (isInitialized == 0) return false;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ if (!getTypeUrlBytes().isEmpty()) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.writeString(output, 1, typeUrl_);
+ }
+ if (!value_.isEmpty()) {
+ output.writeBytes(2, value_);
+ }
+ }
+
+ public int getSerializedSize() {
+ int size = memoizedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (!getTypeUrlBytes().isEmpty()) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.computeStringSize(1, typeUrl_);
+ }
+ if (!value_.isEmpty()) {
+ size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, value_);
+ }
+ memoizedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.shaded.com.google.protobuf.Any)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.Any other = (org.apache.hadoop.hbase.shaded.com.google.protobuf.Any) obj;
+
+ boolean result = true;
+ result = result && getTypeUrl()
+ .equals(other.getTypeUrl());
+ result = result && getValue()
+ .equals(other.getValue());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptor().hashCode();
+ hash = (37 * hash) + TYPE_URL_FIELD_NUMBER;
+ hash = (53 * hash) + getTypeUrl().hashCode();
+ hash = (37 * hash) + VALUE_FIELD_NUMBER;
+ hash = (53 * hash) + getValue().hashCode();
+ hash = (29 * hash) + unknownFields.hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(byte[] data)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
+ byte[] data,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
+ java.io.InputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseDelimitedFrom(
+ java.io.InputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input);
+ }
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any parseFrom(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input,
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3
+ .parseWithIOException(PARSER, input, extensionRegistry);
+ }
+
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder() {
+ return DEFAULT_INSTANCE.toBuilder();
+ }
+ public static Builder newBuilder(org.apache.hadoop.hbase.shaded.com.google.protobuf.Any prototype) {
+ return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() {
+ return this == DEFAULT_INSTANCE
+ ? new Builder() : new Builder().mergeFrom(this);
+ }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ *
+ * `Any` contains an arbitrary serialized protocol buffer message along with a
+ * URL that describes the type of the serialized message.
+ * Protobuf library provides support to pack/unpack Any values in the form
+ * of utility functions or additional generated methods of the Any type.
+ * Example 1: Pack and unpack a message in C++.
+ * Foo foo = ...;
+ * Any any;
+ * any.PackFrom(foo);
+ * ...
+ * if (any.UnpackTo(&foo)) {
+ * ...
+ * }
+ * Example 2: Pack and unpack a message in Java.
+ * Foo foo = ...;
+ * Any any = Any.pack(foo);
+ * ...
+ * if (any.is(Foo.class)) {
+ * foo = any.unpack(Foo.class);
+ * }
+ * Example 3: Pack and unpack a message in Python.
+ * foo = Foo(...)
+ * any = Any()
+ * any.Pack(foo)
+ * ...
+ * if any.Is(Foo.DESCRIPTOR):
+ * any.Unpack(foo)
+ * ...
+ * The pack methods provided by protobuf library will by default use
+ * 'type.googleapis.com/full.type.name' as the type URL and the unpack
+ * methods only use the fully qualified type name after the last '/'
+ * in the type URL, for example "foo.bar.com/x/y.z" will yield type
+ * name "y.z".
+ * JSON
+ * ====
+ * The JSON representation of an `Any` value uses the regular
+ * representation of the deserialized, embedded message, with an
+ * additional field `@type` which contains the type URL. Example:
+ * package google.profile;
+ * message Person {
+ * string first_name = 1;
+ * string last_name = 2;
+ * }
+ * {
+ * "@type": "type.googleapis.com/google.profile.Person",
+ * "firstName": <string>,
+ * "lastName": <string>
+ * }
+ * If the embedded message type is well-known and has a custom JSON
+ * representation, that representation will be embedded adding a field
+ * `value` which holds the custom JSON in addition to the `@type`
+ * field. Example (for message [google.protobuf.Duration][]):
+ * {
+ * "@type": "type.googleapis.org.apache.hadoop.hbase.shaded.com.google.protobuf.Duration",
+ * "value": "1.212s"
+ * }
+ *
+ *
+ * Protobuf type {@code google.protobuf.Any}
+ */
+ public static final class Builder extends
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+ // @@protoc_insertion_point(builder_implements:google.protobuf.Any)
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.AnyOrBuilder {
+
+ private java.lang.Object typeUrl_ = "";
+ /**
+ *
+ * A URL/resource name whose content describes the type of the
+ * serialized protocol buffer message.
+ * For URLs which use the scheme `http`, `https`, or no scheme, the
+ * following restrictions and interpretations apply:
+ * * If no scheme is provided, `https` is assumed.
+ * * The last segment of the URL's path must represent the fully
+ * qualified name of the type (as in `path/google.protobuf.Duration`).
+ * The name should be in a canonical form (e.g., leading "." is
+ * not accepted).
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ * value in binary format, or produce an error.
+ * * Applications are allowed to cache lookup results based on the
+ * URL, or have them precompiled into a binary to avoid any
+ * lookup. Therefore, binary compatibility needs to be preserved
+ * on changes to types. (Use versioned type names to manage
+ * breaking changes.)
+ * Schemes other than `http`, `https` (or the empty scheme) might be
+ * used with implementation specific semantics.
+ *
+ *
+ * string type_url = 1;
+ */
+ public java.lang.String getTypeUrl() {
+ java.lang.Object ref = typeUrl_;
+ if (!(ref instanceof java.lang.String)) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs =
+ (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ java.lang.String s = bs.toStringUtf8();
+ typeUrl_ = s;
+ return s;
+ } else {
+ return (java.lang.String) ref;
+ }
+ }
+ /**
+ *
+ * A URL/resource name whose content describes the type of the
+ * serialized protocol buffer message.
+ * For URLs which use the scheme `http`, `https`, or no scheme, the
+ * following restrictions and interpretations apply:
+ * * If no scheme is provided, `https` is assumed.
+ * * The last segment of the URL's path must represent the fully
+ * qualified name of the type (as in `path/google.protobuf.Duration`).
+ * The name should be in a canonical form (e.g., leading "." is
+ * not accepted).
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ * value in binary format, or produce an error.
+ * * Applications are allowed to cache lookup results based on the
+ * URL, or have them precompiled into a binary to avoid any
+ * lookup. Therefore, binary compatibility needs to be preserved
+ * on changes to types. (Use versioned type names to manage
+ * breaking changes.)
+ * Schemes other than `http`, `https` (or the empty scheme) might be
+ * used with implementation specific semantics.
+ *
+ *
+ * string type_url = 1;
+ */
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString
+ getTypeUrlBytes() {
+ java.lang.Object ref = typeUrl_;
+ if (ref instanceof String) {
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString b =
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.copyFromUtf8(
+ (java.lang.String) ref);
+ typeUrl_ = b;
+ return b;
+ } else {
+ return (org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString) ref;
+ }
+ }
+ /**
+ *
+ * A URL/resource name whose content describes the type of the
+ * serialized protocol buffer message.
+ * For URLs which use the scheme `http`, `https`, or no scheme, the
+ * following restrictions and interpretations apply:
+ * * If no scheme is provided, `https` is assumed.
+ * * The last segment of the URL's path must represent the fully
+ * qualified name of the type (as in `path/google.protobuf.Duration`).
+ * The name should be in a canonical form (e.g., leading "." is
+ * not accepted).
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ * value in binary format, or produce an error.
+ * * Applications are allowed to cache lookup results based on the
+ * URL, or have them precompiled into a binary to avoid any
+ * lookup. Therefore, binary compatibility needs to be preserved
+ * on changes to types. (Use versioned type names to manage
+ * breaking changes.)
+ * Schemes other than `http`, `https` (or the empty scheme) might be
+ * used with implementation specific semantics.
+ *
+ *
+ * string type_url = 1;
+ */
+ public Builder setTypeUrl(
+ java.lang.String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ typeUrl_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ * A URL/resource name whose content describes the type of the
+ * serialized protocol buffer message.
+ * For URLs which use the scheme `http`, `https`, or no scheme, the
+ * following restrictions and interpretations apply:
+ * * If no scheme is provided, `https` is assumed.
+ * * The last segment of the URL's path must represent the fully
+ * qualified name of the type (as in `path/google.protobuf.Duration`).
+ * The name should be in a canonical form (e.g., leading "." is
+ * not accepted).
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ * value in binary format, or produce an error.
+ * * Applications are allowed to cache lookup results based on the
+ * URL, or have them precompiled into a binary to avoid any
+ * lookup. Therefore, binary compatibility needs to be preserved
+ * on changes to types. (Use versioned type names to manage
+ * breaking changes.)
+ * Schemes other than `http`, `https` (or the empty scheme) might be
+ * used with implementation specific semantics.
+ *
+ *
+ * string type_url = 1;
+ */
+ public Builder clearTypeUrl() {
+
+ typeUrl_ = getDefaultInstance().getTypeUrl();
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ * A URL/resource name whose content describes the type of the
+ * serialized protocol buffer message.
+ * For URLs which use the scheme `http`, `https`, or no scheme, the
+ * following restrictions and interpretations apply:
+ * * If no scheme is provided, `https` is assumed.
+ * * The last segment of the URL's path must represent the fully
+ * qualified name of the type (as in `path/google.protobuf.Duration`).
+ * The name should be in a canonical form (e.g., leading "." is
+ * not accepted).
+ * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
+ * value in binary format, or produce an error.
+ * * Applications are allowed to cache lookup results based on the
+ * URL, or have them precompiled into a binary to avoid any
+ * lookup. Therefore, binary compatibility needs to be preserved
+ * on changes to types. (Use versioned type names to manage
+ * breaking changes.)
+ * Schemes other than `http`, `https` (or the empty scheme) might be
+ * used with implementation specific semantics.
+ *
+ *
+ * string type_url = 1;
+ */
+ public Builder setTypeUrlBytes(
+ org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ checkByteStringIsUtf8(value);
+
+ typeUrl_ = value;
+ onChanged();
+ return this;
+ }
+
+ private org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value_ = org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString.EMPTY;
+ /**
+ *
+ * Must be a valid serialized protocol buffer of the above specified type.
+ *
+ *
+ * bytes value = 2;
+ */
+ public org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString getValue() {
+ return value_;
+ }
+ /**
+ *
+ * Must be a valid serialized protocol buffer of the above specified type.
+ *
+ *
+ * bytes value = 2;
+ */
+ public Builder setValue(org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+
+ value_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ *
+ * Must be a valid serialized protocol buffer of the above specified type.
+ *
+ *
+ * bytes value = 2;
+ */
+ public Builder clearValue() {
+
+ value_ = getDefaultInstance().getValue();
+ onChanged();
+ return this;
+ }
+ public final Builder setUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return this;
+ }
+
+ public final Builder mergeUnknownFields(
+ final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) {
+ return this;
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:google.protobuf.Any)
+ }
+
+ // @@protoc_insertion_point(class_scope:google.protobuf.Any)
+ private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Any DEFAULT_INSTANCE;
+ static {
+ DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.com.google.protobuf.Any();
+ }
+
+ public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Any getDefaultInstance() {
+ return DEFAULT_INSTANCE;
+ }
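+
+ // Illustrative sketch, not part of the generated file (assumes `msg` is some generated
+ // Message instance; Any.pack(msg) normally does this for you). Building an Any by hand
+ // with the Builder methods defined above:
+ //
+ //   Any any = Any.newBuilder()
+ //       .setTypeUrl("type.googleapis.com/" + msg.getDescriptorForType().getFullName())
+ //       .setValue(msg.toByteString())
+ //       .build();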
+
+ private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<Any>