HBASE-14077 Add package to hbase-protocol protobuf files.

Elliott Clark 2015-07-14 14:48:02 -07:00
parent a63e3ac83f
commit 62bce5f903
65 changed files with 12903 additions and 12734 deletions
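The source-level change driving this diff is a protobuf package declaration added to each .proto file; the bulk of the 65 changed files is simply the Java that protoc regenerates as a result. A minimal sketch of the change, using an abridged BulkDelete.proto (other fields, the DeleteType enum, and the service definition elided; assuming HBase's usual proto2 syntax):

    // Before: no package, so cross-file references resolve at the root
    // (e.g. ".Scan") and protoc names descriptor fields like
    // internal_static_BulkDeleteRequest_descriptor.
    import "Client.proto";   // provides the Scan message
    option java_package = "org.apache.hadoop.hbase.coprocessor.example.generated";
    option java_outer_classname = "BulkDeleteProtos";
    message BulkDeleteRequest {
      required Scan scan = 1;
    }

    // After: the hbase.pb package prefixes every message, enum, and service,
    // so references become ".hbase.pb.Scan" and the generated descriptor
    // fields become internal_static_hbase_pb_BulkDeleteRequest_descriptor.
    // The generated Java package is unchanged because java_package still
    // controls it.
    package hbase.pb;
    import "Client.proto";
    option java_package = "org.apache.hadoop.hbase.coprocessor.example.generated";
    option java_outer_classname = "BulkDeleteProtos";
    message BulkDeleteRequest {
      required Scan scan = 1;
    }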

View File

@ -11,27 +11,27 @@ public final class BulkDeleteProtos {
public interface BulkDeleteRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .Scan scan = 1;
// required .hbase.pb.Scan scan = 1;
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
boolean hasScan();
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
// required .BulkDeleteRequest.DeleteType deleteType = 2;
// required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
/**
* <code>required .BulkDeleteRequest.DeleteType deleteType = 2;</code>
* <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
*/
boolean hasDeleteType();
/**
* <code>required .BulkDeleteRequest.DeleteType deleteType = 2;</code>
* <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
*/
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType();
@ -56,7 +56,7 @@ public final class BulkDeleteProtos {
int getRowBatchSize();
}
/**
* Protobuf type {@code BulkDeleteRequest}
* Protobuf type {@code hbase.pb.BulkDeleteRequest}
*/
public static final class BulkDeleteRequest extends
com.google.protobuf.GeneratedMessage
@ -154,12 +154,12 @@ public final class BulkDeleteProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class);
}
@ -180,7 +180,7 @@ public final class BulkDeleteProtos {
}
/**
* Protobuf enum {@code BulkDeleteRequest.DeleteType}
* Protobuf enum {@code hbase.pb.BulkDeleteRequest.DeleteType}
*/
public enum DeleteType
implements com.google.protobuf.ProtocolMessageEnum {
@ -276,43 +276,43 @@ public final class BulkDeleteProtos {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:BulkDeleteRequest.DeleteType)
// @@protoc_insertion_point(enum_scope:hbase.pb.BulkDeleteRequest.DeleteType)
}
private int bitField0_;
// required .Scan scan = 1;
// required .hbase.pb.Scan scan = 1;
public static final int SCAN_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public boolean hasScan() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
return scan_;
}
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
return scan_;
}
// required .BulkDeleteRequest.DeleteType deleteType = 2;
// required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
public static final int DELETETYPE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_;
/**
* <code>required .BulkDeleteRequest.DeleteType deleteType = 2;</code>
* <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
*/
public boolean hasDeleteType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .BulkDeleteRequest.DeleteType deleteType = 2;</code>
* <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
*/
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() {
return deleteType_;
@ -565,19 +565,19 @@ public final class BulkDeleteProtos {
return builder;
}
/**
* Protobuf type {@code BulkDeleteRequest}
* Protobuf type {@code hbase.pb.BulkDeleteRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class);
}
@ -624,7 +624,7 @@ public final class BulkDeleteProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteRequest_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor;
}
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest getDefaultInstanceForType() {
@ -734,18 +734,18 @@ public final class BulkDeleteProtos {
}
private int bitField0_;
// required .Scan scan = 1;
// required .hbase.pb.Scan scan = 1;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public boolean hasScan() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
if (scanBuilder_ == null) {
@ -755,7 +755,7 @@ public final class BulkDeleteProtos {
}
}
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
if (scanBuilder_ == null) {
@ -771,7 +771,7 @@ public final class BulkDeleteProtos {
return this;
}
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public Builder setScan(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
@ -785,7 +785,7 @@ public final class BulkDeleteProtos {
return this;
}
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
if (scanBuilder_ == null) {
@ -804,7 +804,7 @@ public final class BulkDeleteProtos {
return this;
}
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public Builder clearScan() {
if (scanBuilder_ == null) {
@ -817,7 +817,7 @@ public final class BulkDeleteProtos {
return this;
}
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
bitField0_ |= 0x00000001;
@ -825,7 +825,7 @@ public final class BulkDeleteProtos {
return getScanFieldBuilder().getBuilder();
}
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
if (scanBuilder_ != null) {
@ -835,7 +835,7 @@ public final class BulkDeleteProtos {
}
}
/**
* <code>required .Scan scan = 1;</code>
* <code>required .hbase.pb.Scan scan = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
@ -851,22 +851,22 @@ public final class BulkDeleteProtos {
return scanBuilder_;
}
// required .BulkDeleteRequest.DeleteType deleteType = 2;
// required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
/**
* <code>required .BulkDeleteRequest.DeleteType deleteType = 2;</code>
* <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
*/
public boolean hasDeleteType() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .BulkDeleteRequest.DeleteType deleteType = 2;</code>
* <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
*/
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() {
return deleteType_;
}
/**
* <code>required .BulkDeleteRequest.DeleteType deleteType = 2;</code>
* <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
*/
public Builder setDeleteType(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value) {
if (value == null) {
@ -878,7 +878,7 @@ public final class BulkDeleteProtos {
return this;
}
/**
* <code>required .BulkDeleteRequest.DeleteType deleteType = 2;</code>
* <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
*/
public Builder clearDeleteType() {
bitField0_ = (bitField0_ & ~0x00000002);
@ -953,7 +953,7 @@ public final class BulkDeleteProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:BulkDeleteRequest)
// @@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteRequest)
}
static {
@ -961,7 +961,7 @@ public final class BulkDeleteProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:BulkDeleteRequest)
// @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteRequest)
}
public interface BulkDeleteResponseOrBuilder
@ -988,7 +988,7 @@ public final class BulkDeleteProtos {
long getVersionsDeleted();
}
/**
* Protobuf type {@code BulkDeleteResponse}
* Protobuf type {@code hbase.pb.BulkDeleteResponse}
*/
public static final class BulkDeleteResponse extends
com.google.protobuf.GeneratedMessage
@ -1062,12 +1062,12 @@ public final class BulkDeleteProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class);
}
@ -1289,19 +1289,19 @@ public final class BulkDeleteProtos {
return builder;
}
/**
* Protobuf type {@code BulkDeleteResponse}
* Protobuf type {@code hbase.pb.BulkDeleteResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class);
}
@ -1339,7 +1339,7 @@ public final class BulkDeleteProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_BulkDeleteResponse_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor;
}
public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse getDefaultInstanceForType() {
@ -1485,7 +1485,7 @@ public final class BulkDeleteProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:BulkDeleteResponse)
// @@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteResponse)
}
static {
@ -1493,11 +1493,11 @@ public final class BulkDeleteProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:BulkDeleteResponse)
// @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteResponse)
}
/**
* Protobuf service {@code BulkDeleteService}
* Protobuf service {@code hbase.pb.BulkDeleteService}
*/
public static abstract class BulkDeleteService
implements com.google.protobuf.Service {
@ -1505,7 +1505,7 @@ public final class BulkDeleteProtos {
public interface Interface {
/**
* <code>rpc delete(.BulkDeleteRequest) returns (.BulkDeleteResponse);</code>
* <code>rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse);</code>
*/
public abstract void delete(
com.google.protobuf.RpcController controller,
@ -1590,7 +1590,7 @@ public final class BulkDeleteProtos {
}
/**
* <code>rpc delete(.BulkDeleteRequest) returns (.BulkDeleteResponse);</code>
* <code>rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse);</code>
*/
public abstract void delete(
com.google.protobuf.RpcController controller,
@ -1725,19 +1725,19 @@ public final class BulkDeleteProtos {
}
// @@protoc_insertion_point(class_scope:BulkDeleteService)
// @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteService)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_BulkDeleteRequest_descriptor;
internal_static_hbase_pb_BulkDeleteRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_BulkDeleteRequest_fieldAccessorTable;
internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_BulkDeleteResponse_descriptor;
internal_static_hbase_pb_BulkDeleteResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_BulkDeleteResponse_fieldAccessorTable;
internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -1747,35 +1747,36 @@ public final class BulkDeleteProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\020BulkDelete.proto\032\014Client.proto\"\300\001\n\021Bul" +
"kDeleteRequest\022\023\n\004scan\030\001 \002(\0132\005.Scan\0221\n\nd" +
"eleteType\030\002 \002(\0162\035.BulkDeleteRequest.Dele" +
"teType\022\021\n\ttimestamp\030\003 \001(\004\022\024\n\014rowBatchSiz" +
"e\030\004 \002(\r\":\n\nDeleteType\022\007\n\003ROW\020\000\022\n\n\006FAMILY" +
"\020\001\022\n\n\006COLUMN\020\002\022\013\n\007VERSION\020\003\"B\n\022BulkDelet" +
"eResponse\022\023\n\013rowsDeleted\030\001 \002(\004\022\027\n\017versio" +
"nsDeleted\030\002 \001(\0042F\n\021BulkDeleteService\0221\n\006" +
"delete\022\022.BulkDeleteRequest\032\023.BulkDeleteR" +
"esponseBQ\n5org.apache.hadoop.hbase.copro",
"cessor.example.generatedB\020BulkDeleteProt" +
"osH\001\210\001\001\240\001\001"
"\n\020BulkDelete.proto\022\010hbase.pb\032\014Client.pro" +
"to\"\322\001\n\021BulkDeleteRequest\022\034\n\004scan\030\001 \002(\0132\016" +
".hbase.pb.Scan\022:\n\ndeleteType\030\002 \002(\0162&.hba" +
"se.pb.BulkDeleteRequest.DeleteType\022\021\n\tti" +
"mestamp\030\003 \001(\004\022\024\n\014rowBatchSize\030\004 \002(\r\":\n\nD" +
"eleteType\022\007\n\003ROW\020\000\022\n\n\006FAMILY\020\001\022\n\n\006COLUMN" +
"\020\002\022\013\n\007VERSION\020\003\"B\n\022BulkDeleteResponse\022\023\n" +
"\013rowsDeleted\030\001 \002(\004\022\027\n\017versionsDeleted\030\002 " +
"\001(\0042X\n\021BulkDeleteService\022C\n\006delete\022\033.hba" +
"se.pb.BulkDeleteRequest\032\034.hbase.pb.BulkD",
"eleteResponseBQ\n5org.apache.hadoop.hbase" +
".coprocessor.example.generatedB\020BulkDele" +
"teProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_BulkDeleteRequest_descriptor =
internal_static_hbase_pb_BulkDeleteRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_BulkDeleteRequest_fieldAccessorTable = new
internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_BulkDeleteRequest_descriptor,
internal_static_hbase_pb_BulkDeleteRequest_descriptor,
new java.lang.String[] { "Scan", "DeleteType", "Timestamp", "RowBatchSize", });
internal_static_BulkDeleteResponse_descriptor =
internal_static_hbase_pb_BulkDeleteResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_BulkDeleteResponse_fieldAccessorTable = new
internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_BulkDeleteResponse_descriptor,
internal_static_hbase_pb_BulkDeleteResponse_descriptor,
new java.lang.String[] { "RowsDeleted", "VersionsDeleted", });
return null;
}

View File

@ -12,7 +12,7 @@ public final class ExampleProtos {
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code CountRequest}
* Protobuf type {@code hbase.pb.CountRequest}
*/
public static final class CountRequest extends
com.google.protobuf.GeneratedMessage
@ -75,12 +75,12 @@ public final class ExampleProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class);
}
@ -231,19 +231,19 @@ public final class ExampleProtos {
return builder;
}
/**
* Protobuf type {@code CountRequest}
* Protobuf type {@code hbase.pb.CountRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest.Builder.class);
}
@ -277,7 +277,7 @@ public final class ExampleProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountRequest_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountRequest_descriptor;
}
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountRequest getDefaultInstanceForType() {
@ -335,7 +335,7 @@ public final class ExampleProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:CountRequest)
// @@protoc_insertion_point(builder_scope:hbase.pb.CountRequest)
}
static {
@ -343,7 +343,7 @@ public final class ExampleProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:CountRequest)
// @@protoc_insertion_point(class_scope:hbase.pb.CountRequest)
}
public interface CountResponseOrBuilder
@ -360,7 +360,7 @@ public final class ExampleProtos {
long getCount();
}
/**
* Protobuf type {@code CountResponse}
* Protobuf type {@code hbase.pb.CountResponse}
*/
public static final class CountResponse extends
com.google.protobuf.GeneratedMessage
@ -429,12 +429,12 @@ public final class ExampleProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
}
@ -623,19 +623,19 @@ public final class ExampleProtos {
return builder;
}
/**
* Protobuf type {@code CountResponse}
* Protobuf type {@code hbase.pb.CountResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_fieldAccessorTable
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse.Builder.class);
}
@ -671,7 +671,7 @@ public final class ExampleProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_CountResponse_descriptor;
return org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.internal_static_hbase_pb_CountResponse_descriptor;
}
public org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse getDefaultInstanceForType() {
@ -777,7 +777,7 @@ public final class ExampleProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:CountResponse)
// @@protoc_insertion_point(builder_scope:hbase.pb.CountResponse)
}
static {
@ -785,11 +785,11 @@ public final class ExampleProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:CountResponse)
// @@protoc_insertion_point(class_scope:hbase.pb.CountResponse)
}
/**
* Protobuf service {@code RowCountService}
* Protobuf service {@code hbase.pb.RowCountService}
*/
public static abstract class RowCountService
implements com.google.protobuf.Service {
@ -797,7 +797,7 @@ public final class ExampleProtos {
public interface Interface {
/**
* <code>rpc getRowCount(.CountRequest) returns (.CountResponse);</code>
* <code>rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);</code>
*/
public abstract void getRowCount(
com.google.protobuf.RpcController controller,
@ -805,7 +805,7 @@ public final class ExampleProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
/**
* <code>rpc getKeyValueCount(.CountRequest) returns (.CountResponse);</code>
* <code>rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);</code>
*/
public abstract void getKeyValueCount(
com.google.protobuf.RpcController controller,
@ -904,7 +904,7 @@ public final class ExampleProtos {
}
/**
* <code>rpc getRowCount(.CountRequest) returns (.CountResponse);</code>
* <code>rpc getRowCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);</code>
*/
public abstract void getRowCount(
com.google.protobuf.RpcController controller,
@ -912,7 +912,7 @@ public final class ExampleProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.ExampleProtos.CountResponse> done);
/**
* <code>rpc getKeyValueCount(.CountRequest) returns (.CountResponse);</code>
* <code>rpc getKeyValueCount(.hbase.pb.CountRequest) returns (.hbase.pb.CountResponse);</code>
*/
public abstract void getKeyValueCount(
com.google.protobuf.RpcController controller,
@ -1088,19 +1088,19 @@ public final class ExampleProtos {
}
// @@protoc_insertion_point(class_scope:RowCountService)
// @@protoc_insertion_point(class_scope:hbase.pb.RowCountService)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_CountRequest_descriptor;
internal_static_hbase_pb_CountRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_CountRequest_fieldAccessorTable;
internal_static_hbase_pb_CountRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_CountResponse_descriptor;
internal_static_hbase_pb_CountResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_CountResponse_fieldAccessorTable;
internal_static_hbase_pb_CountResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -1110,30 +1110,31 @@ public final class ExampleProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\016Examples.proto\"\016\n\014CountRequest\"!\n\rCoun" +
"tResponse\022\020\n\005count\030\001 \002(\003:\00102r\n\017RowCountS" +
"ervice\022,\n\013getRowCount\022\r.CountRequest\032\016.C" +
"ountResponse\0221\n\020getKeyValueCount\022\r.Count" +
"Request\032\016.CountResponseBN\n5org.apache.ha" +
"doop.hbase.coprocessor.example.generated" +
"B\rExampleProtosH\001\210\001\001\240\001\001"
"\n\016Examples.proto\022\010hbase.pb\"\016\n\014CountReque" +
"st\"!\n\rCountResponse\022\020\n\005count\030\001 \002(\003:\00102\226\001" +
"\n\017RowCountService\022>\n\013getRowCount\022\026.hbase" +
".pb.CountRequest\032\027.hbase.pb.CountRespons" +
"e\022C\n\020getKeyValueCount\022\026.hbase.pb.CountRe" +
"quest\032\027.hbase.pb.CountResponseBN\n5org.ap" +
"ache.hadoop.hbase.coprocessor.example.ge" +
"neratedB\rExampleProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_CountRequest_descriptor =
internal_static_hbase_pb_CountRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_CountRequest_fieldAccessorTable = new
internal_static_hbase_pb_CountRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CountRequest_descriptor,
internal_static_hbase_pb_CountRequest_descriptor,
new java.lang.String[] { });
internal_static_CountResponse_descriptor =
internal_static_hbase_pb_CountResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_CountResponse_fieldAccessorTable = new
internal_static_hbase_pb_CountResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CountResponse_descriptor,
internal_static_hbase_pb_CountResponse_descriptor,
new java.lang.String[] { "Count", });
return null;
}

View File

@ -16,6 +16,8 @@
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.coprocessor.example.generated";
option java_outer_classname = "BulkDeleteProtos";
option java_generic_services = true;

View File

@ -15,6 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.coprocessor.example.generated";
option java_outer_classname = "ExampleProtos";

View File

@ -50,17 +50,17 @@ public final class AggregateProtos {
com.google.protobuf.ByteString
getInterpreterClassNameBytes();
// required .Scan scan = 2;
// required .hbase.pb.Scan scan = 2;
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
boolean hasScan();
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
@ -75,7 +75,7 @@ public final class AggregateProtos {
com.google.protobuf.ByteString getInterpreterSpecificBytes();
}
/**
* Protobuf type {@code AggregateRequest}
* Protobuf type {@code hbase.pb.AggregateRequest}
*/
public static final class AggregateRequest extends
com.google.protobuf.GeneratedMessage
@ -162,12 +162,12 @@ public final class AggregateProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class);
}
@ -255,23 +255,23 @@ public final class AggregateProtos {
}
}
// required .Scan scan = 2;
// required .hbase.pb.Scan scan = 2;
public static final int SCAN_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public boolean hasScan() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
return scan_;
}
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
return scan_;
@ -487,19 +487,19 @@ public final class AggregateProtos {
return builder;
}
/**
* Protobuf type {@code AggregateRequest}
* Protobuf type {@code hbase.pb.AggregateRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest.Builder.class);
}
@ -544,7 +544,7 @@ public final class AggregateProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateRequest getDefaultInstanceForType() {
@ -767,18 +767,18 @@ public final class AggregateProtos {
return this;
}
// required .Scan scan = 2;
// required .hbase.pb.Scan scan = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public boolean hasScan() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
if (scanBuilder_ == null) {
@ -788,7 +788,7 @@ public final class AggregateProtos {
}
}
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
if (scanBuilder_ == null) {
@ -804,7 +804,7 @@ public final class AggregateProtos {
return this;
}
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public Builder setScan(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
@ -818,7 +818,7 @@ public final class AggregateProtos {
return this;
}
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
if (scanBuilder_ == null) {
@ -837,7 +837,7 @@ public final class AggregateProtos {
return this;
}
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public Builder clearScan() {
if (scanBuilder_ == null) {
@ -850,7 +850,7 @@ public final class AggregateProtos {
return this;
}
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
bitField0_ |= 0x00000002;
@ -858,7 +858,7 @@ public final class AggregateProtos {
return getScanFieldBuilder().getBuilder();
}
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
if (scanBuilder_ != null) {
@ -868,7 +868,7 @@ public final class AggregateProtos {
}
}
/**
* <code>required .Scan scan = 2;</code>
* <code>required .hbase.pb.Scan scan = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
@ -920,7 +920,7 @@ public final class AggregateProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:AggregateRequest)
// @@protoc_insertion_point(builder_scope:hbase.pb.AggregateRequest)
}
static {
@ -928,7 +928,7 @@ public final class AggregateProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:AggregateRequest)
// @@protoc_insertion_point(class_scope:hbase.pb.AggregateRequest)
}
public interface AggregateResponseOrBuilder
@ -986,7 +986,7 @@ public final class AggregateProtos {
com.google.protobuf.ByteString getSecondPart();
}
/**
* Protobuf type {@code AggregateResponse}
* Protobuf type {@code hbase.pb.AggregateResponse}
*/
public static final class AggregateResponse extends
com.google.protobuf.GeneratedMessage
@ -1066,12 +1066,12 @@ public final class AggregateProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class);
}
@ -1325,19 +1325,19 @@ public final class AggregateProtos {
return builder;
}
/**
* Protobuf type {@code AggregateResponse}
* Protobuf type {@code hbase.pb.AggregateResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.class, org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse.Builder.class);
}
@ -1375,7 +1375,7 @@ public final class AggregateProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_AggregateResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.internal_static_hbase_pb_AggregateResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse getDefaultInstanceForType() {
@ -1630,7 +1630,7 @@ public final class AggregateProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:AggregateResponse)
// @@protoc_insertion_point(builder_scope:hbase.pb.AggregateResponse)
}
static {
@ -1638,11 +1638,11 @@ public final class AggregateProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:AggregateResponse)
// @@protoc_insertion_point(class_scope:hbase.pb.AggregateResponse)
}
/**
* Protobuf service {@code AggregateService}
* Protobuf service {@code hbase.pb.AggregateService}
*
* <pre>
** Refer to the AggregateImplementation class for an overview of the
@ -1655,7 +1655,7 @@ public final class AggregateProtos {
public interface Interface {
/**
* <code>rpc GetMax(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetMax(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getMax(
com.google.protobuf.RpcController controller,
@ -1663,7 +1663,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetMin(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetMin(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getMin(
com.google.protobuf.RpcController controller,
@ -1671,7 +1671,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetSum(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetSum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getSum(
com.google.protobuf.RpcController controller,
@ -1679,7 +1679,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetRowNum(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetRowNum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getRowNum(
com.google.protobuf.RpcController controller,
@ -1687,7 +1687,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetAvg(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetAvg(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getAvg(
com.google.protobuf.RpcController controller,
@ -1695,7 +1695,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetStd(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetStd(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getStd(
com.google.protobuf.RpcController controller,
@ -1703,7 +1703,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetMedian(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetMedian(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getMedian(
com.google.protobuf.RpcController controller,
@ -1872,7 +1872,7 @@ public final class AggregateProtos {
}
/**
* <code>rpc GetMax(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetMax(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getMax(
com.google.protobuf.RpcController controller,
@ -1880,7 +1880,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetMin(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetMin(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getMin(
com.google.protobuf.RpcController controller,
@ -1888,7 +1888,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetSum(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetSum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getSum(
com.google.protobuf.RpcController controller,
@ -1896,7 +1896,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetRowNum(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetRowNum(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getRowNum(
com.google.protobuf.RpcController controller,
@ -1904,7 +1904,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetAvg(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetAvg(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getAvg(
com.google.protobuf.RpcController controller,
@ -1912,7 +1912,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetStd(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetStd(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getStd(
com.google.protobuf.RpcController controller,
@ -1920,7 +1920,7 @@ public final class AggregateProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AggregateProtos.AggregateResponse> done);
/**
* <code>rpc GetMedian(.AggregateRequest) returns (.AggregateResponse);</code>
* <code>rpc GetMedian(.hbase.pb.AggregateRequest) returns (.hbase.pb.AggregateResponse);</code>
*/
public abstract void getMedian(
com.google.protobuf.RpcController controller,
@ -2301,19 +2301,19 @@ public final class AggregateProtos {
}
// @@protoc_insertion_point(class_scope:AggregateService)
// @@protoc_insertion_point(class_scope:hbase.pb.AggregateService)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_AggregateRequest_descriptor;
internal_static_hbase_pb_AggregateRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_AggregateRequest_fieldAccessorTable;
internal_static_hbase_pb_AggregateRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_AggregateResponse_descriptor;
internal_static_hbase_pb_AggregateResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_AggregateResponse_fieldAccessorTable;
internal_static_hbase_pb_AggregateResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -2323,40 +2323,43 @@ public final class AggregateProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\017Aggregate.proto\032\014Client.proto\"k\n\020Aggre" +
"gateRequest\022\036\n\026interpreter_class_name\030\001 " +
"\002(\t\022\023\n\004scan\030\002 \002(\0132\005.Scan\022\"\n\032interpreter_" +
"specific_bytes\030\003 \001(\014\"<\n\021AggregateRespons" +
"e\022\022\n\nfirst_part\030\001 \003(\014\022\023\n\013second_part\030\002 \001" +
"(\0142\357\002\n\020AggregateService\022/\n\006GetMax\022\021.Aggr" +
"egateRequest\032\022.AggregateResponse\022/\n\006GetM" +
"in\022\021.AggregateRequest\032\022.AggregateRespons" +
"e\022/\n\006GetSum\022\021.AggregateRequest\032\022.Aggrega" +
"teResponse\0222\n\tGetRowNum\022\021.AggregateReque",
"st\032\022.AggregateResponse\022/\n\006GetAvg\022\021.Aggre" +
"gateRequest\032\022.AggregateResponse\022/\n\006GetSt" +
"d\022\021.AggregateRequest\032\022.AggregateResponse" +
"\0222\n\tGetMedian\022\021.AggregateRequest\032\022.Aggre" +
"gateResponseBE\n*org.apache.hadoop.hbase." +
"protobuf.generatedB\017AggregateProtosH\001\210\001\001" +
"\240\001\001"
"\n\017Aggregate.proto\022\010hbase.pb\032\014Client.prot" +
"o\"t\n\020AggregateRequest\022\036\n\026interpreter_cla" +
"ss_name\030\001 \002(\t\022\034\n\004scan\030\002 \002(\0132\016.hbase.pb.S" +
"can\022\"\n\032interpreter_specific_bytes\030\003 \001(\014\"" +
"<\n\021AggregateResponse\022\022\n\nfirst_part\030\001 \003(\014" +
"\022\023\n\013second_part\030\002 \001(\0142\355\003\n\020AggregateServi" +
"ce\022A\n\006GetMax\022\032.hbase.pb.AggregateRequest" +
"\032\033.hbase.pb.AggregateResponse\022A\n\006GetMin\022" +
"\032.hbase.pb.AggregateRequest\032\033.hbase.pb.A" +
"ggregateResponse\022A\n\006GetSum\022\032.hbase.pb.Ag",
"gregateRequest\032\033.hbase.pb.AggregateRespo" +
"nse\022D\n\tGetRowNum\022\032.hbase.pb.AggregateReq" +
"uest\032\033.hbase.pb.AggregateResponse\022A\n\006Get" +
"Avg\022\032.hbase.pb.AggregateRequest\032\033.hbase." +
"pb.AggregateResponse\022A\n\006GetStd\022\032.hbase.p" +
"b.AggregateRequest\032\033.hbase.pb.AggregateR" +
"esponse\022D\n\tGetMedian\022\032.hbase.pb.Aggregat" +
"eRequest\032\033.hbase.pb.AggregateResponseBE\n" +
"*org.apache.hadoop.hbase.protobuf.genera" +
"tedB\017AggregateProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_AggregateRequest_descriptor =
internal_static_hbase_pb_AggregateRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_AggregateRequest_fieldAccessorTable = new
internal_static_hbase_pb_AggregateRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_AggregateRequest_descriptor,
internal_static_hbase_pb_AggregateRequest_descriptor,
new java.lang.String[] { "InterpreterClassName", "Scan", "InterpreterSpecificBytes", });
internal_static_AggregateResponse_descriptor =
internal_static_hbase_pb_AggregateResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_AggregateResponse_fieldAccessorTable = new
internal_static_hbase_pb_AggregateResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_AggregateResponse_descriptor,
internal_static_hbase_pb_AggregateResponse_descriptor,
new java.lang.String[] { "FirstPart", "SecondPart", });
return null;
}

View File

@ -42,7 +42,7 @@ public final class AuthenticationProtos {
com.google.protobuf.ByteString getKey();
}
/**
* Protobuf type {@code AuthenticationKey}
* Protobuf type {@code hbase.pb.AuthenticationKey}
*/
public static final class AuthenticationKey extends
com.google.protobuf.GeneratedMessage
@ -121,12 +121,12 @@ public final class AuthenticationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_AuthenticationKey_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_AuthenticationKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.Builder.class);
}
@ -389,19 +389,19 @@ public final class AuthenticationProtos {
return builder;
}
/**
* Protobuf type {@code AuthenticationKey}
* Protobuf type {@code hbase.pb.AuthenticationKey}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKeyOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_AuthenticationKey_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_AuthenticationKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey.Builder.class);
}
@ -441,7 +441,7 @@ public final class AuthenticationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_AuthenticationKey_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_AuthenticationKey_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.AuthenticationKey getDefaultInstanceForType() {
@ -638,7 +638,7 @@ public final class AuthenticationProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:AuthenticationKey)
// @@protoc_insertion_point(builder_scope:hbase.pb.AuthenticationKey)
}
static {
@ -646,19 +646,19 @@ public final class AuthenticationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:AuthenticationKey)
// @@protoc_insertion_point(class_scope:hbase.pb.AuthenticationKey)
}
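As a usage sketch of the generated builder API for this message (the field names Id, ExpirationDate and Key come from the accessor table later in this file; the concrete values are made up):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;

public class AuthenticationKeyExample {
  public static void main(String[] args) throws Exception {
    AuthenticationProtos.AuthenticationKey key =
        AuthenticationProtos.AuthenticationKey.newBuilder()
            .setId(42)                                                   // required int32 id
            .setExpirationDate(System.currentTimeMillis() + 86400000L)  // required int64 expiration_date
            .setKey(ByteString.copyFromUtf8("not-a-real-secret"))       // required bytes key
            .build();
    // The wire format is unchanged by this commit; only the type's full name moved into hbase.pb.
    AuthenticationProtos.AuthenticationKey parsed =
        AuthenticationProtos.AuthenticationKey.parseFrom(key.toByteArray());
    System.out.println(parsed.getId());
  }
}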
public interface TokenIdentifierOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required .TokenIdentifier.Kind kind = 1;
// required .hbase.pb.TokenIdentifier.Kind kind = 1;
/**
* <code>required .TokenIdentifier.Kind kind = 1;</code>
* <code>required .hbase.pb.TokenIdentifier.Kind kind = 1;</code>
*/
boolean hasKind();
/**
* <code>required .TokenIdentifier.Kind kind = 1;</code>
* <code>required .hbase.pb.TokenIdentifier.Kind kind = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind getKind();
@ -713,7 +713,7 @@ public final class AuthenticationProtos {
long getSequenceNumber();
}
/**
* Protobuf type {@code TokenIdentifier}
* Protobuf type {@code hbase.pb.TokenIdentifier}
*/
public static final class TokenIdentifier extends
com.google.protobuf.GeneratedMessage
@ -813,12 +813,12 @@ public final class AuthenticationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_TokenIdentifier_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_TokenIdentifier_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Builder.class);
}
@ -839,7 +839,7 @@ public final class AuthenticationProtos {
}
/**
* Protobuf enum {@code TokenIdentifier.Kind}
* Protobuf enum {@code hbase.pb.TokenIdentifier.Kind}
*/
public enum Kind
implements com.google.protobuf.ProtocolMessageEnum {
@ -908,21 +908,21 @@ public final class AuthenticationProtos {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:TokenIdentifier.Kind)
// @@protoc_insertion_point(enum_scope:hbase.pb.TokenIdentifier.Kind)
}
private int bitField0_;
// required .TokenIdentifier.Kind kind = 1;
// required .hbase.pb.TokenIdentifier.Kind kind = 1;
public static final int KIND_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind kind_;
/**
* <code>required .TokenIdentifier.Kind kind = 1;</code>
* <code>required .hbase.pb.TokenIdentifier.Kind kind = 1;</code>
*/
public boolean hasKind() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .TokenIdentifier.Kind kind = 1;</code>
* <code>required .hbase.pb.TokenIdentifier.Kind kind = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind getKind() {
return kind_;
@ -1253,19 +1253,19 @@ public final class AuthenticationProtos {
return builder;
}
/**
* Protobuf type {@code TokenIdentifier}
* Protobuf type {@code hbase.pb.TokenIdentifier}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifierOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_TokenIdentifier_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_TokenIdentifier_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Builder.class);
}
@ -1311,7 +1311,7 @@ public final class AuthenticationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_TokenIdentifier_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_TokenIdentifier_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier getDefaultInstanceForType() {
@ -1427,22 +1427,22 @@ public final class AuthenticationProtos {
}
private int bitField0_;
// required .TokenIdentifier.Kind kind = 1;
// required .hbase.pb.TokenIdentifier.Kind kind = 1;
private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind kind_ = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN;
/**
* <code>required .TokenIdentifier.Kind kind = 1;</code>
* <code>required .hbase.pb.TokenIdentifier.Kind kind = 1;</code>
*/
public boolean hasKind() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .TokenIdentifier.Kind kind = 1;</code>
* <code>required .hbase.pb.TokenIdentifier.Kind kind = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind getKind() {
return kind_;
}
/**
* <code>required .TokenIdentifier.Kind kind = 1;</code>
* <code>required .hbase.pb.TokenIdentifier.Kind kind = 1;</code>
*/
public Builder setKind(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenIdentifier.Kind value) {
if (value == null) {
@ -1454,7 +1454,7 @@ public final class AuthenticationProtos {
return this;
}
/**
* <code>required .TokenIdentifier.Kind kind = 1;</code>
* <code>required .hbase.pb.TokenIdentifier.Kind kind = 1;</code>
*/
public Builder clearKind() {
bitField0_ = (bitField0_ & ~0x00000001);
@ -1631,7 +1631,7 @@ public final class AuthenticationProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:TokenIdentifier)
// @@protoc_insertion_point(builder_scope:hbase.pb.TokenIdentifier)
}
static {
@ -1639,7 +1639,7 @@ public final class AuthenticationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:TokenIdentifier)
// @@protoc_insertion_point(class_scope:hbase.pb.TokenIdentifier)
}
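A brief builder sketch for TokenIdentifier; the Kind enum value HBASE_AUTH_TOKEN and the field names are taken from the generated code above, while the values themselves are illustrative:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;

public class TokenIdentifierExample {
  public static void main(String[] args) {
    AuthenticationProtos.TokenIdentifier id =
        AuthenticationProtos.TokenIdentifier.newBuilder()
            .setKind(AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN)
            .setUsername(ByteString.copyFromUtf8("alice"))   // required bytes username
            .setKeyId(42)                                     // required int32 key_id
            .setIssueDate(System.currentTimeMillis())         // optional int64 issue_date
            .build();
    // getKind() now resolves against the hbase.pb.TokenIdentifier.Kind enum scope.
    System.out.println(id.getKind());
  }
}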
public interface TokenOrBuilder
@ -1688,7 +1688,7 @@ public final class AuthenticationProtos {
com.google.protobuf.ByteString getService();
}
/**
* Protobuf type {@code Token}
* Protobuf type {@code hbase.pb.Token}
*
* <pre>
* Serialization of the org.apache.hadoop.security.token.Token class
@ -1772,12 +1772,12 @@ public final class AuthenticationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_Token_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_Token_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder.class);
}
@ -2040,7 +2040,7 @@ public final class AuthenticationProtos {
return builder;
}
/**
* Protobuf type {@code Token}
* Protobuf type {@code hbase.pb.Token}
*
* <pre>
* Serialization of the org.apache.hadoop.security.token.Token class
@ -2052,12 +2052,12 @@ public final class AuthenticationProtos {
implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_Token_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_Token_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder.class);
}
@ -2097,7 +2097,7 @@ public final class AuthenticationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_Token_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_Token_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token getDefaultInstanceForType() {
@ -2312,7 +2312,7 @@ public final class AuthenticationProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:Token)
// @@protoc_insertion_point(builder_scope:hbase.pb.Token)
}
static {
@ -2320,14 +2320,14 @@ public final class AuthenticationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:Token)
// @@protoc_insertion_point(class_scope:hbase.pb.Token)
}
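A usage sketch of the generated Token builder (all three fields are optional bytes; the values are placeholders):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;

public class TokenExample {
  public static void main(String[] args) {
    AuthenticationProtos.Token token =
        AuthenticationProtos.Token.newBuilder()
            .setIdentifier(ByteString.copyFromUtf8("token-identifier-bytes"))
            .setPassword(ByteString.copyFromUtf8("token-password-bytes"))
            .setService(ByteString.copyFromUtf8("hbase-cluster-id"))
            .build();
    // Serialized size is unaffected by the package rename; only descriptor names changed.
    System.out.println(token.getSerializedSize());
  }
}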
public interface GetAuthenticationTokenRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code GetAuthenticationTokenRequest}
* Protobuf type {@code hbase.pb.GetAuthenticationTokenRequest}
*
* <pre>
* RPC request &amp; response messages
@ -2394,12 +2394,12 @@ public final class AuthenticationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_GetAuthenticationTokenRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_GetAuthenticationTokenRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.Builder.class);
}
@ -2550,7 +2550,7 @@ public final class AuthenticationProtos {
return builder;
}
/**
* Protobuf type {@code GetAuthenticationTokenRequest}
* Protobuf type {@code hbase.pb.GetAuthenticationTokenRequest}
*
* <pre>
* RPC request &amp; response messages
@ -2561,12 +2561,12 @@ public final class AuthenticationProtos {
implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_GetAuthenticationTokenRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_GetAuthenticationTokenRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest.Builder.class);
}
@ -2600,7 +2600,7 @@ public final class AuthenticationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_GetAuthenticationTokenRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenRequest getDefaultInstanceForType() {
@ -2658,7 +2658,7 @@ public final class AuthenticationProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:GetAuthenticationTokenRequest)
// @@protoc_insertion_point(builder_scope:hbase.pb.GetAuthenticationTokenRequest)
}
static {
@ -2666,28 +2666,28 @@ public final class AuthenticationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:GetAuthenticationTokenRequest)
// @@protoc_insertion_point(class_scope:hbase.pb.GetAuthenticationTokenRequest)
}
public interface GetAuthenticationTokenResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional .Token token = 1;
// optional .hbase.pb.Token token = 1;
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
boolean hasToken();
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token getToken();
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder getTokenOrBuilder();
}
/**
* Protobuf type {@code GetAuthenticationTokenResponse}
* Protobuf type {@code hbase.pb.GetAuthenticationTokenResponse}
*/
public static final class GetAuthenticationTokenResponse extends
com.google.protobuf.GeneratedMessage
@ -2764,12 +2764,12 @@ public final class AuthenticationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_GetAuthenticationTokenResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_GetAuthenticationTokenResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.Builder.class);
}
@ -2790,23 +2790,23 @@ public final class AuthenticationProtos {
}
private int bitField0_;
// optional .Token token = 1;
// optional .hbase.pb.Token token = 1;
public static final int TOKEN_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token token_;
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public boolean hasToken() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token getToken() {
return token_;
}
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder getTokenOrBuilder() {
return token_;
@ -2960,19 +2960,19 @@ public final class AuthenticationProtos {
return builder;
}
/**
* Protobuf type {@code GetAuthenticationTokenResponse}
* Protobuf type {@code hbase.pb.GetAuthenticationTokenResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_GetAuthenticationTokenResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_GetAuthenticationTokenResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse.Builder.class);
}
@ -3013,7 +3013,7 @@ public final class AuthenticationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_GetAuthenticationTokenResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_GetAuthenticationTokenResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse getDefaultInstanceForType() {
@ -3086,18 +3086,18 @@ public final class AuthenticationProtos {
}
private int bitField0_;
// optional .Token token = 1;
// optional .hbase.pb.Token token = 1;
private org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token token_ = org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder> tokenBuilder_;
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public boolean hasToken() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token getToken() {
if (tokenBuilder_ == null) {
@ -3107,7 +3107,7 @@ public final class AuthenticationProtos {
}
}
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public Builder setToken(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token value) {
if (tokenBuilder_ == null) {
@ -3123,7 +3123,7 @@ public final class AuthenticationProtos {
return this;
}
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public Builder setToken(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder builderForValue) {
@ -3137,7 +3137,7 @@ public final class AuthenticationProtos {
return this;
}
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public Builder mergeToken(org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token value) {
if (tokenBuilder_ == null) {
@ -3156,7 +3156,7 @@ public final class AuthenticationProtos {
return this;
}
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public Builder clearToken() {
if (tokenBuilder_ == null) {
@ -3169,7 +3169,7 @@ public final class AuthenticationProtos {
return this;
}
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder getTokenBuilder() {
bitField0_ |= 0x00000001;
@ -3177,7 +3177,7 @@ public final class AuthenticationProtos {
return getTokenFieldBuilder().getBuilder();
}
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder getTokenOrBuilder() {
if (tokenBuilder_ != null) {
@ -3187,7 +3187,7 @@ public final class AuthenticationProtos {
}
}
/**
* <code>optional .Token token = 1;</code>
* <code>optional .hbase.pb.Token token = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.Token.Builder, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.TokenOrBuilder>
@ -3203,7 +3203,7 @@ public final class AuthenticationProtos {
return tokenBuilder_;
}
// @@protoc_insertion_point(builder_scope:GetAuthenticationTokenResponse)
// @@protoc_insertion_point(builder_scope:hbase.pb.GetAuthenticationTokenResponse)
}
static {
@ -3211,14 +3211,14 @@ public final class AuthenticationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:GetAuthenticationTokenResponse)
// @@protoc_insertion_point(class_scope:hbase.pb.GetAuthenticationTokenResponse)
}
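Because the token field is optional, callers are expected to check presence before reading it. A small sketch of that pattern (the response is assumed to come from an RPC such as getAuthenticationToken):

import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;

public class TokenResponseHandling {
  static AuthenticationProtos.Token extract(
      AuthenticationProtos.GetAuthenticationTokenResponse response) {
    // hasToken() guards the optional .hbase.pb.Token field declared above.
    if (response.hasToken()) {
      return response.getToken();
    }
    // Fall back to the type's default instance when the server sent no token.
    return AuthenticationProtos.Token.getDefaultInstance();
  }
}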
public interface WhoAmIRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code WhoAmIRequest}
* Protobuf type {@code hbase.pb.WhoAmIRequest}
*/
public static final class WhoAmIRequest extends
com.google.protobuf.GeneratedMessage
@ -3281,12 +3281,12 @@ public final class AuthenticationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_WhoAmIRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_WhoAmIRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.Builder.class);
}
@ -3437,19 +3437,19 @@ public final class AuthenticationProtos {
return builder;
}
/**
* Protobuf type {@code WhoAmIRequest}
* Protobuf type {@code hbase.pb.WhoAmIRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_WhoAmIRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_WhoAmIRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest.Builder.class);
}
@ -3483,7 +3483,7 @@ public final class AuthenticationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_WhoAmIRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIRequest getDefaultInstanceForType() {
@ -3541,7 +3541,7 @@ public final class AuthenticationProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:WhoAmIRequest)
// @@protoc_insertion_point(builder_scope:hbase.pb.WhoAmIRequest)
}
static {
@ -3549,7 +3549,7 @@ public final class AuthenticationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:WhoAmIRequest)
// @@protoc_insertion_point(class_scope:hbase.pb.WhoAmIRequest)
}
public interface WhoAmIResponseOrBuilder
@ -3586,7 +3586,7 @@ public final class AuthenticationProtos {
getAuthMethodBytes();
}
/**
* Protobuf type {@code WhoAmIResponse}
* Protobuf type {@code hbase.pb.WhoAmIResponse}
*/
public static final class WhoAmIResponse extends
com.google.protobuf.GeneratedMessage
@ -3660,12 +3660,12 @@ public final class AuthenticationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_WhoAmIResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_WhoAmIResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.Builder.class);
}
@ -3937,19 +3937,19 @@ public final class AuthenticationProtos {
return builder;
}
/**
* Protobuf type {@code WhoAmIResponse}
* Protobuf type {@code hbase.pb.WhoAmIResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_WhoAmIResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_WhoAmIResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.class, org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse.Builder.class);
}
@ -3987,7 +3987,7 @@ public final class AuthenticationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_WhoAmIResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.internal_static_hbase_pb_WhoAmIResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.WhoAmIResponse getDefaultInstanceForType() {
@ -4215,7 +4215,7 @@ public final class AuthenticationProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:WhoAmIResponse)
// @@protoc_insertion_point(builder_scope:hbase.pb.WhoAmIResponse)
}
static {
@ -4223,11 +4223,11 @@ public final class AuthenticationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:WhoAmIResponse)
// @@protoc_insertion_point(class_scope:hbase.pb.WhoAmIResponse)
}
/**
* Protobuf service {@code AuthenticationService}
* Protobuf service {@code hbase.pb.AuthenticationService}
*
* <pre>
* RPC service
@ -4239,7 +4239,7 @@ public final class AuthenticationProtos {
public interface Interface {
/**
* <code>rpc GetAuthenticationToken(.GetAuthenticationTokenRequest) returns (.GetAuthenticationTokenResponse);</code>
* <code>rpc GetAuthenticationToken(.hbase.pb.GetAuthenticationTokenRequest) returns (.hbase.pb.GetAuthenticationTokenResponse);</code>
*/
public abstract void getAuthenticationToken(
com.google.protobuf.RpcController controller,
@ -4247,7 +4247,7 @@ public final class AuthenticationProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse> done);
/**
* <code>rpc WhoAmI(.WhoAmIRequest) returns (.WhoAmIResponse);</code>
* <code>rpc WhoAmI(.hbase.pb.WhoAmIRequest) returns (.hbase.pb.WhoAmIResponse);</code>
*/
public abstract void whoAmI(
com.google.protobuf.RpcController controller,
@ -4346,7 +4346,7 @@ public final class AuthenticationProtos {
}
/**
* <code>rpc GetAuthenticationToken(.GetAuthenticationTokenRequest) returns (.GetAuthenticationTokenResponse);</code>
* <code>rpc GetAuthenticationToken(.hbase.pb.GetAuthenticationTokenRequest) returns (.hbase.pb.GetAuthenticationTokenResponse);</code>
*/
public abstract void getAuthenticationToken(
com.google.protobuf.RpcController controller,
@ -4354,7 +4354,7 @@ public final class AuthenticationProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos.GetAuthenticationTokenResponse> done);
/**
* <code>rpc WhoAmI(.WhoAmIRequest) returns (.WhoAmIResponse);</code>
* <code>rpc WhoAmI(.hbase.pb.WhoAmIRequest) returns (.hbase.pb.WhoAmIResponse);</code>
*/
public abstract void whoAmI(
com.google.protobuf.RpcController controller,
@ -4530,44 +4530,44 @@ public final class AuthenticationProtos {
}
// @@protoc_insertion_point(class_scope:AuthenticationService)
// @@protoc_insertion_point(class_scope:hbase.pb.AuthenticationService)
}
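To illustrate how the generated service is meant to be used on the server side, here is a hypothetical endpoint that extends the abstract service class and answers both RPCs. It is a sketch only, not HBase's actual token provider, and the returned values are made up:

import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;

public class ExampleAuthenticationEndpoint
    extends AuthenticationProtos.AuthenticationService {

  @Override
  public void getAuthenticationToken(
      RpcController controller,
      AuthenticationProtos.GetAuthenticationTokenRequest request,
      RpcCallback<AuthenticationProtos.GetAuthenticationTokenResponse> done) {
    // A real implementation would mint a token; this sketch returns an empty response.
    done.run(AuthenticationProtos.GetAuthenticationTokenResponse.getDefaultInstance());
  }

  @Override
  public void whoAmI(
      RpcController controller,
      AuthenticationProtos.WhoAmIRequest request,
      RpcCallback<AuthenticationProtos.WhoAmIResponse> done) {
    done.run(AuthenticationProtos.WhoAmIResponse.newBuilder()
        .setUsername("alice")        // optional string username
        .setAuthMethod("KERBEROS")   // optional string auth_method
        .build());
  }
}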
private static com.google.protobuf.Descriptors.Descriptor
internal_static_AuthenticationKey_descriptor;
internal_static_hbase_pb_AuthenticationKey_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_AuthenticationKey_fieldAccessorTable;
internal_static_hbase_pb_AuthenticationKey_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_TokenIdentifier_descriptor;
internal_static_hbase_pb_TokenIdentifier_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_TokenIdentifier_fieldAccessorTable;
internal_static_hbase_pb_TokenIdentifier_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_Token_descriptor;
internal_static_hbase_pb_Token_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_Token_fieldAccessorTable;
internal_static_hbase_pb_Token_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_GetAuthenticationTokenRequest_descriptor;
internal_static_hbase_pb_GetAuthenticationTokenRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_GetAuthenticationTokenRequest_fieldAccessorTable;
internal_static_hbase_pb_GetAuthenticationTokenRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_GetAuthenticationTokenResponse_descriptor;
internal_static_hbase_pb_GetAuthenticationTokenResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_GetAuthenticationTokenResponse_fieldAccessorTable;
internal_static_hbase_pb_GetAuthenticationTokenResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_WhoAmIRequest_descriptor;
internal_static_hbase_pb_WhoAmIRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_WhoAmIRequest_fieldAccessorTable;
internal_static_hbase_pb_WhoAmIRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_WhoAmIResponse_descriptor;
internal_static_hbase_pb_WhoAmIResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_WhoAmIResponse_fieldAccessorTable;
internal_static_hbase_pb_WhoAmIResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -4577,72 +4577,74 @@ public final class AuthenticationProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\024Authentication.proto\"E\n\021Authentication" +
"Key\022\n\n\002id\030\001 \002(\005\022\027\n\017expiration_date\030\002 \002(\003" +
"\022\013\n\003key\030\003 \002(\014\"\274\001\n\017TokenIdentifier\022#\n\004kin" +
"d\030\001 \002(\0162\025.TokenIdentifier.Kind\022\020\n\010userna" +
"me\030\002 \002(\014\022\016\n\006key_id\030\003 \002(\005\022\022\n\nissue_date\030\004" +
" \001(\003\022\027\n\017expiration_date\030\005 \001(\003\022\027\n\017sequenc" +
"e_number\030\006 \001(\003\"\034\n\004Kind\022\024\n\020HBASE_AUTH_TOK" +
"EN\020\000\">\n\005Token\022\022\n\nidentifier\030\001 \001(\014\022\020\n\010pas" +
"sword\030\002 \001(\014\022\017\n\007service\030\003 \001(\014\"\037\n\035GetAuthe" +
"nticationTokenRequest\"7\n\036GetAuthenticati",
"onTokenResponse\022\025\n\005token\030\001 \001(\0132\006.Token\"\017" +
"\n\rWhoAmIRequest\"7\n\016WhoAmIResponse\022\020\n\010use" +
"rname\030\001 \001(\t\022\023\n\013auth_method\030\002 \001(\t2\235\001\n\025Aut" +
"henticationService\022Y\n\026GetAuthenticationT" +
"oken\022\036.GetAuthenticationTokenRequest\032\037.G" +
"etAuthenticationTokenResponse\022)\n\006WhoAmI\022" +
"\016.WhoAmIRequest\032\017.WhoAmIResponseBJ\n*org." +
"apache.hadoop.hbase.protobuf.generatedB\024" +
"AuthenticationProtosH\001\210\001\001\240\001\001"
"\n\024Authentication.proto\022\010hbase.pb\"E\n\021Auth" +
"enticationKey\022\n\n\002id\030\001 \002(\005\022\027\n\017expiration_" +
"date\030\002 \002(\003\022\013\n\003key\030\003 \002(\014\"\305\001\n\017TokenIdentif" +
"ier\022,\n\004kind\030\001 \002(\0162\036.hbase.pb.TokenIdenti" +
"fier.Kind\022\020\n\010username\030\002 \002(\014\022\016\n\006key_id\030\003 " +
"\002(\005\022\022\n\nissue_date\030\004 \001(\003\022\027\n\017expiration_da" +
"te\030\005 \001(\003\022\027\n\017sequence_number\030\006 \001(\003\"\034\n\004Kin" +
"d\022\024\n\020HBASE_AUTH_TOKEN\020\000\">\n\005Token\022\022\n\niden" +
"tifier\030\001 \001(\014\022\020\n\010password\030\002 \001(\014\022\017\n\007servic" +
"e\030\003 \001(\014\"\037\n\035GetAuthenticationTokenRequest",
"\"@\n\036GetAuthenticationTokenResponse\022\036\n\005to" +
"ken\030\001 \001(\0132\017.hbase.pb.Token\"\017\n\rWhoAmIRequ" +
"est\"7\n\016WhoAmIResponse\022\020\n\010username\030\001 \001(\t\022" +
"\023\n\013auth_method\030\002 \001(\t2\301\001\n\025AuthenticationS" +
"ervice\022k\n\026GetAuthenticationToken\022\'.hbase" +
".pb.GetAuthenticationTokenRequest\032(.hbas" +
"e.pb.GetAuthenticationTokenResponse\022;\n\006W" +
"hoAmI\022\027.hbase.pb.WhoAmIRequest\032\030.hbase.p" +
"b.WhoAmIResponseBJ\n*org.apache.hadoop.hb" +
"ase.protobuf.generatedB\024AuthenticationPr",
"otosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_AuthenticationKey_descriptor =
internal_static_hbase_pb_AuthenticationKey_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_AuthenticationKey_fieldAccessorTable = new
internal_static_hbase_pb_AuthenticationKey_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_AuthenticationKey_descriptor,
internal_static_hbase_pb_AuthenticationKey_descriptor,
new java.lang.String[] { "Id", "ExpirationDate", "Key", });
internal_static_TokenIdentifier_descriptor =
internal_static_hbase_pb_TokenIdentifier_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_TokenIdentifier_fieldAccessorTable = new
internal_static_hbase_pb_TokenIdentifier_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_TokenIdentifier_descriptor,
internal_static_hbase_pb_TokenIdentifier_descriptor,
new java.lang.String[] { "Kind", "Username", "KeyId", "IssueDate", "ExpirationDate", "SequenceNumber", });
internal_static_Token_descriptor =
internal_static_hbase_pb_Token_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_Token_fieldAccessorTable = new
internal_static_hbase_pb_Token_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_Token_descriptor,
internal_static_hbase_pb_Token_descriptor,
new java.lang.String[] { "Identifier", "Password", "Service", });
internal_static_GetAuthenticationTokenRequest_descriptor =
internal_static_hbase_pb_GetAuthenticationTokenRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_GetAuthenticationTokenRequest_fieldAccessorTable = new
internal_static_hbase_pb_GetAuthenticationTokenRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_GetAuthenticationTokenRequest_descriptor,
internal_static_hbase_pb_GetAuthenticationTokenRequest_descriptor,
new java.lang.String[] { });
internal_static_GetAuthenticationTokenResponse_descriptor =
internal_static_hbase_pb_GetAuthenticationTokenResponse_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_GetAuthenticationTokenResponse_fieldAccessorTable = new
internal_static_hbase_pb_GetAuthenticationTokenResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_GetAuthenticationTokenResponse_descriptor,
internal_static_hbase_pb_GetAuthenticationTokenResponse_descriptor,
new java.lang.String[] { "Token", });
internal_static_WhoAmIRequest_descriptor =
internal_static_hbase_pb_WhoAmIRequest_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_WhoAmIRequest_fieldAccessorTable = new
internal_static_hbase_pb_WhoAmIRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_WhoAmIRequest_descriptor,
internal_static_hbase_pb_WhoAmIRequest_descriptor,
new java.lang.String[] { });
internal_static_WhoAmIResponse_descriptor =
internal_static_hbase_pb_WhoAmIResponse_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_WhoAmIResponse_fieldAccessorTable = new
internal_static_hbase_pb_WhoAmIResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_WhoAmIResponse_descriptor,
internal_static_hbase_pb_WhoAmIResponse_descriptor,
new java.lang.String[] { "Username", "AuthMethod", });
return null;
}
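The reworked descriptor string above is the serialized FileDescriptorProto for Authentication.proto; the extra \022\010hbase.pb bytes encode the new package declaration. A small check of what that means at runtime, using the standard protobuf descriptor API:

import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;

public class PackageCheck {
  public static void main(String[] args) {
    // Prints "hbase.pb" after this commit; it was an empty string before.
    System.out.println(AuthenticationProtos.getDescriptor().getPackage());
    // Message and service full names pick up the package prefix as well,
    // e.g. "hbase.pb.Token" and "hbase.pb.AuthenticationService".
    System.out.println(AuthenticationProtos.Token.getDescriptor().getFullName());
    System.out.println(AuthenticationProtos.AuthenticationService.getDescriptor().getFullName());
  }
}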

org/apache/hadoop/hbase/protobuf/generated/CellProtos.java

@ -9,7 +9,7 @@ public final class CellProtos {
com.google.protobuf.ExtensionRegistry registry) {
}
/**
* Protobuf enum {@code CellType}
* Protobuf enum {@code hbase.pb.CellType}
*
* <pre>
**
@ -136,7 +136,7 @@ public final class CellProtos {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:CellType)
// @@protoc_insertion_point(enum_scope:hbase.pb.CellType)
}
public interface CellOrBuilder
@ -182,13 +182,13 @@ public final class CellProtos {
*/
long getTimestamp();
// optional .CellType cell_type = 5;
// optional .hbase.pb.CellType cell_type = 5;
/**
* <code>optional .CellType cell_type = 5;</code>
* <code>optional .hbase.pb.CellType cell_type = 5;</code>
*/
boolean hasCellType();
/**
* <code>optional .CellType cell_type = 5;</code>
* <code>optional .hbase.pb.CellType cell_type = 5;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType getCellType();
@ -213,7 +213,7 @@ public final class CellProtos {
com.google.protobuf.ByteString getTags();
}
/**
* Protobuf type {@code Cell}
* Protobuf type {@code hbase.pb.Cell}
*
* <pre>
**
@ -323,12 +323,12 @@ public final class CellProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_Cell_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_hbase_pb_Cell_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_Cell_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_hbase_pb_Cell_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.class, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder.class);
}
@ -413,17 +413,17 @@ public final class CellProtos {
return timestamp_;
}
// optional .CellType cell_type = 5;
// optional .hbase.pb.CellType cell_type = 5;
public static final int CELL_TYPE_FIELD_NUMBER = 5;
private org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType cellType_;
/**
* <code>optional .CellType cell_type = 5;</code>
* <code>optional .hbase.pb.CellType cell_type = 5;</code>
*/
public boolean hasCellType() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .CellType cell_type = 5;</code>
* <code>optional .hbase.pb.CellType cell_type = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType getCellType() {
return cellType_;
@ -711,7 +711,7 @@ public final class CellProtos {
return builder;
}
/**
* Protobuf type {@code Cell}
* Protobuf type {@code hbase.pb.Cell}
*
* <pre>
**
@ -723,12 +723,12 @@ public final class CellProtos {
implements org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_Cell_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_hbase_pb_Cell_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_Cell_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_hbase_pb_Cell_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.class, org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell.Builder.class);
}
@ -776,7 +776,7 @@ public final class CellProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_Cell_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_hbase_pb_Cell_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.Cell getDefaultInstanceForType() {
@ -1028,22 +1028,22 @@ public final class CellProtos {
return this;
}
// optional .CellType cell_type = 5;
// optional .hbase.pb.CellType cell_type = 5;
private org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType cellType_ = org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType.MINIMUM;
/**
* <code>optional .CellType cell_type = 5;</code>
* <code>optional .hbase.pb.CellType cell_type = 5;</code>
*/
public boolean hasCellType() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .CellType cell_type = 5;</code>
* <code>optional .hbase.pb.CellType cell_type = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType getCellType() {
return cellType_;
}
/**
* <code>optional .CellType cell_type = 5;</code>
* <code>optional .hbase.pb.CellType cell_type = 5;</code>
*/
public Builder setCellType(org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType value) {
if (value == null) {
@ -1055,7 +1055,7 @@ public final class CellProtos {
return this;
}
/**
* <code>optional .CellType cell_type = 5;</code>
* <code>optional .hbase.pb.CellType cell_type = 5;</code>
*/
public Builder clearCellType() {
bitField0_ = (bitField0_ & ~0x00000010);
@ -1136,7 +1136,7 @@ public final class CellProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:Cell)
// @@protoc_insertion_point(builder_scope:hbase.pb.Cell)
}
static {
@ -1144,7 +1144,7 @@ public final class CellProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:Cell)
// @@protoc_insertion_point(class_scope:hbase.pb.Cell)
}
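A builder sketch for the Cell message; the field names come from the accessor table at the end of this file and the cell contents are illustrative only:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.CellProtos;

public class CellExample {
  public static void main(String[] args) {
    CellProtos.Cell cell =
        CellProtos.Cell.newBuilder()
            .setRow(ByteString.copyFromUtf8("row-1"))
            .setFamily(ByteString.copyFromUtf8("cf"))
            .setQualifier(ByteString.copyFromUtf8("q"))
            .setTimestamp(1L)
            .setCellType(CellProtos.CellType.PUT)    // optional .hbase.pb.CellType cell_type = 5
            .setValue(ByteString.copyFromUtf8("v"))
            .build();
    // Unset enum fields fall back to the declared default, CellType.MINIMUM.
    System.out.println(cell.getCellType());
  }
}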
public interface KeyValueOrBuilder
@ -1190,13 +1190,13 @@ public final class CellProtos {
*/
long getTimestamp();
// optional .CellType key_type = 5;
// optional .hbase.pb.CellType key_type = 5;
/**
* <code>optional .CellType key_type = 5;</code>
* <code>optional .hbase.pb.CellType key_type = 5;</code>
*/
boolean hasKeyType();
/**
* <code>optional .CellType key_type = 5;</code>
* <code>optional .hbase.pb.CellType key_type = 5;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType getKeyType();
@ -1221,7 +1221,7 @@ public final class CellProtos {
com.google.protobuf.ByteString getTags();
}
/**
* Protobuf type {@code KeyValue}
* Protobuf type {@code hbase.pb.KeyValue}
*
* <pre>
**
@ -1332,12 +1332,12 @@ public final class CellProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_KeyValue_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_hbase_pb_KeyValue_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_KeyValue_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_hbase_pb_KeyValue_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.CellProtos.KeyValue.class, org.apache.hadoop.hbase.protobuf.generated.CellProtos.KeyValue.Builder.class);
}
@ -1422,17 +1422,17 @@ public final class CellProtos {
return timestamp_;
}
// optional .CellType key_type = 5;
// optional .hbase.pb.CellType key_type = 5;
public static final int KEY_TYPE_FIELD_NUMBER = 5;
private org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType keyType_;
/**
* <code>optional .CellType key_type = 5;</code>
* <code>optional .hbase.pb.CellType key_type = 5;</code>
*/
public boolean hasKeyType() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .CellType key_type = 5;</code>
* <code>optional .hbase.pb.CellType key_type = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType getKeyType() {
return keyType_;
@ -1732,7 +1732,7 @@ public final class CellProtos {
return builder;
}
/**
* Protobuf type {@code KeyValue}
* Protobuf type {@code hbase.pb.KeyValue}
*
* <pre>
**
@ -1745,12 +1745,12 @@ public final class CellProtos {
implements org.apache.hadoop.hbase.protobuf.generated.CellProtos.KeyValueOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_KeyValue_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_hbase_pb_KeyValue_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_KeyValue_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_hbase_pb_KeyValue_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.CellProtos.KeyValue.class, org.apache.hadoop.hbase.protobuf.generated.CellProtos.KeyValue.Builder.class);
}
@ -1798,7 +1798,7 @@ public final class CellProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_KeyValue_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.CellProtos.internal_static_hbase_pb_KeyValue_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.KeyValue getDefaultInstanceForType() {
@ -2062,22 +2062,22 @@ public final class CellProtos {
return this;
}
// optional .CellType key_type = 5;
// optional .hbase.pb.CellType key_type = 5;
private org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType keyType_ = org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType.MINIMUM;
/**
* <code>optional .CellType key_type = 5;</code>
* <code>optional .hbase.pb.CellType key_type = 5;</code>
*/
public boolean hasKeyType() {
return ((bitField0_ & 0x00000010) == 0x00000010);
}
/**
* <code>optional .CellType key_type = 5;</code>
* <code>optional .hbase.pb.CellType key_type = 5;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType getKeyType() {
return keyType_;
}
/**
* <code>optional .CellType key_type = 5;</code>
* <code>optional .hbase.pb.CellType key_type = 5;</code>
*/
public Builder setKeyType(org.apache.hadoop.hbase.protobuf.generated.CellProtos.CellType value) {
if (value == null) {
@ -2089,7 +2089,7 @@ public final class CellProtos {
return this;
}
/**
* <code>optional .CellType key_type = 5;</code>
* <code>optional .hbase.pb.CellType key_type = 5;</code>
*/
public Builder clearKeyType() {
bitField0_ = (bitField0_ & ~0x00000010);
@ -2170,7 +2170,7 @@ public final class CellProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:KeyValue)
// @@protoc_insertion_point(builder_scope:hbase.pb.KeyValue)
}
static {
@ -2178,19 +2178,19 @@ public final class CellProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:KeyValue)
// @@protoc_insertion_point(class_scope:hbase.pb.KeyValue)
}
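Unlike Cell, the first three KeyValue fields are required (see the descriptor data below), so initialization depends on all of them being set. A short sketch of that difference, with placeholder values:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.CellProtos;

public class KeyValueExample {
  public static void main(String[] args) {
    CellProtos.KeyValue.Builder builder =
        CellProtos.KeyValue.newBuilder()
            .setRow(ByteString.copyFromUtf8("row-1"))     // required bytes row
            .setFamily(ByteString.copyFromUtf8("cf"))     // required bytes family
            .setQualifier(ByteString.copyFromUtf8("q"));  // required bytes qualifier
    // buildPartial() skips the required-field check; isInitialized() reports whether
    // all required fields have been set.
    System.out.println(builder.buildPartial().isInitialized());
    // An unset optional enum falls back to its default, CellType.MINIMUM.
    System.out.println(builder.buildPartial().getKeyType());
  }
}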
private static com.google.protobuf.Descriptors.Descriptor
internal_static_Cell_descriptor;
internal_static_hbase_pb_Cell_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_Cell_fieldAccessorTable;
internal_static_hbase_pb_Cell_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_KeyValue_descriptor;
internal_static_hbase_pb_KeyValue_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_KeyValue_fieldAccessorTable;
internal_static_hbase_pb_KeyValue_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -2200,35 +2200,35 @@ public final class CellProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\nCell.proto\"\204\001\n\004Cell\022\013\n\003row\030\001 \001(\014\022\016\n\006fa" +
"mily\030\002 \001(\014\022\021\n\tqualifier\030\003 \001(\014\022\021\n\ttimesta" +
"mp\030\004 \001(\004\022\034\n\tcell_type\030\005 \001(\0162\t.CellType\022\r" +
"\n\005value\030\006 \001(\014\022\014\n\004tags\030\007 \001(\014\"\207\001\n\010KeyValue" +
"\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002 \002(\014\022\021\n\tqualifi" +
"er\030\003 \002(\014\022\021\n\ttimestamp\030\004 \001(\004\022\033\n\010key_type\030" +
"\005 \001(\0162\t.CellType\022\r\n\005value\030\006 \001(\014\022\014\n\004tags\030" +
"\007 \001(\014*`\n\010CellType\022\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022" +
"\n\n\006DELETE\020\010\022\021\n\rDELETE_COLUMN\020\014\022\021\n\rDELETE" +
"_FAMILY\020\016\022\014\n\007MAXIMUM\020\377\001B=\n*org.apache.ha",
"doop.hbase.protobuf.generatedB\nCellProto" +
"sH\001\240\001\001"
"\n\nCell.proto\022\010hbase.pb\"\215\001\n\004Cell\022\013\n\003row\030\001" +
" \001(\014\022\016\n\006family\030\002 \001(\014\022\021\n\tqualifier\030\003 \001(\014\022" +
"\021\n\ttimestamp\030\004 \001(\004\022%\n\tcell_type\030\005 \001(\0162\022." +
"hbase.pb.CellType\022\r\n\005value\030\006 \001(\014\022\014\n\004tags" +
"\030\007 \001(\014\"\220\001\n\010KeyValue\022\013\n\003row\030\001 \002(\014\022\016\n\006fami" +
"ly\030\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttimestamp" +
"\030\004 \001(\004\022$\n\010key_type\030\005 \001(\0162\022.hbase.pb.Cell" +
"Type\022\r\n\005value\030\006 \001(\014\022\014\n\004tags\030\007 \001(\014*`\n\010Cel" +
"lType\022\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022" +
"\021\n\rDELETE_COLUMN\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n",
"\007MAXIMUM\020\377\001B=\n*org.apache.hadoop.hbase.p" +
"rotobuf.generatedB\nCellProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_Cell_descriptor =
internal_static_hbase_pb_Cell_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_Cell_fieldAccessorTable = new
internal_static_hbase_pb_Cell_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_Cell_descriptor,
internal_static_hbase_pb_Cell_descriptor,
new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "CellType", "Value", "Tags", });
internal_static_KeyValue_descriptor =
internal_static_hbase_pb_KeyValue_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_KeyValue_fieldAccessorTable = new
internal_static_hbase_pb_KeyValue_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_KeyValue_descriptor,
internal_static_hbase_pb_KeyValue_descriptor,
new java.lang.String[] { "Row", "Family", "Qualifier", "Timestamp", "KeyType", "Value", "Tags", });
return null;
}

org/apache/hadoop/hbase/protobuf/generated/ClusterIdProtos.java

@ -39,7 +39,7 @@ public final class ClusterIdProtos {
getClusterIdBytes();
}
/**
* Protobuf type {@code ClusterId}
* Protobuf type {@code hbase.pb.ClusterId}
*
* <pre>
**
@ -114,12 +114,12 @@ public final class ClusterIdProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
}
@ -347,7 +347,7 @@ public final class ClusterIdProtos {
return builder;
}
/**
* Protobuf type {@code ClusterId}
* Protobuf type {@code hbase.pb.ClusterId}
*
* <pre>
**
@ -360,12 +360,12 @@ public final class ClusterIdProtos {
implements org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
}
@ -401,7 +401,7 @@ public final class ClusterIdProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_hbase_pb_ClusterId_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstanceForType() {
@ -574,7 +574,7 @@ public final class ClusterIdProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:ClusterId)
// @@protoc_insertion_point(builder_scope:hbase.pb.ClusterId)
}
static {
@ -582,14 +582,14 @@ public final class ClusterIdProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:ClusterId)
// @@protoc_insertion_point(class_scope:hbase.pb.ClusterId)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ClusterId_descriptor;
internal_static_hbase_pb_ClusterId_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ClusterId_fieldAccessorTable;
internal_static_hbase_pb_ClusterId_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -599,20 +599,21 @@ public final class ClusterIdProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\017ClusterId.proto\"\037\n\tClusterId\022\022\n\ncluste" +
"r_id\030\001 \002(\tBB\n*org.apache.hadoop.hbase.pr" +
"otobuf.generatedB\017ClusterIdProtosH\001\240\001\001"
"\n\017ClusterId.proto\022\010hbase.pb\"\037\n\tClusterId" +
"\022\022\n\ncluster_id\030\001 \002(\tBB\n*org.apache.hadoo" +
"p.hbase.protobuf.generatedB\017ClusterIdPro" +
"tosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_ClusterId_descriptor =
internal_static_hbase_pb_ClusterId_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_ClusterId_fieldAccessorTable = new
internal_static_hbase_pb_ClusterId_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ClusterId_descriptor,
internal_static_hbase_pb_ClusterId_descriptor,
new java.lang.String[] { "ClusterId", });
return null;
}
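
The proto package changes type names only; field numbers and the wire format are untouched, so ClusterId data written before this change still parses. A small round-trip sketch, assuming the regenerated ClusterIdProtos is on the classpath:

    import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId;

    public class ClusterIdRoundTrip {
      public static void main(String[] args) throws Exception {
        // cluster_id is still field 1 (required string), exactly as before.
        byte[] bytes = ClusterId.newBuilder().setClusterId("test-cluster").build().toByteArray();
        ClusterId parsed = ClusterId.parseFrom(bytes);
        // Prints "hbase.pb.ClusterId test-cluster".
        System.out.println(parsed.getDescriptorForType().getFullName() + " " + parsed.getClusterId());
      }
    }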


@ -67,7 +67,7 @@ public final class EncryptionProtos {
com.google.protobuf.ByteString getHash();
}
/**
* Protobuf type {@code WrappedKey}
* Protobuf type {@code hbase.pb.WrappedKey}
*/
public static final class WrappedKey extends
com.google.protobuf.GeneratedMessage
@ -156,12 +156,12 @@ public final class EncryptionProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_WrappedKey_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_hbase_pb_WrappedKey_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_WrappedKey_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_hbase_pb_WrappedKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.class, org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.Builder.class);
}
@ -517,19 +517,19 @@ public final class EncryptionProtos {
return builder;
}
/**
* Protobuf type {@code WrappedKey}
* Protobuf type {@code hbase.pb.WrappedKey}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKeyOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_WrappedKey_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_hbase_pb_WrappedKey_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_WrappedKey_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_hbase_pb_WrappedKey_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.class, org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey.Builder.class);
}
@ -573,7 +573,7 @@ public final class EncryptionProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_WrappedKey_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.internal_static_hbase_pb_WrappedKey_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos.WrappedKey getDefaultInstanceForType() {
@ -899,7 +899,7 @@ public final class EncryptionProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:WrappedKey)
// @@protoc_insertion_point(builder_scope:hbase.pb.WrappedKey)
}
static {
@ -907,14 +907,14 @@ public final class EncryptionProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:WrappedKey)
// @@protoc_insertion_point(class_scope:hbase.pb.WrappedKey)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_WrappedKey_descriptor;
internal_static_hbase_pb_WrappedKey_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_WrappedKey_fieldAccessorTable;
internal_static_hbase_pb_WrappedKey_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -924,22 +924,22 @@ public final class EncryptionProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\020Encryption.proto\"W\n\nWrappedKey\022\021\n\talgo" +
"rithm\030\001 \002(\t\022\016\n\006length\030\002 \002(\r\022\014\n\004data\030\003 \002(" +
"\014\022\n\n\002iv\030\004 \001(\014\022\014\n\004hash\030\005 \001(\014BC\n*org.apach" +
"e.hadoop.hbase.protobuf.generatedB\020Encry" +
"ptionProtosH\001\240\001\001"
"\n\020Encryption.proto\022\010hbase.pb\"W\n\nWrappedK" +
"ey\022\021\n\talgorithm\030\001 \002(\t\022\016\n\006length\030\002 \002(\r\022\014\n" +
"\004data\030\003 \002(\014\022\n\n\002iv\030\004 \001(\014\022\014\n\004hash\030\005 \001(\014BC\n" +
"*org.apache.hadoop.hbase.protobuf.genera" +
"tedB\020EncryptionProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_WrappedKey_descriptor =
internal_static_hbase_pb_WrappedKey_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_WrappedKey_fieldAccessorTable = new
internal_static_hbase_pb_WrappedKey_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_WrappedKey_descriptor,
internal_static_hbase_pb_WrappedKey_descriptor,
new java.lang.String[] { "Algorithm", "Length", "Data", "Iv", "Hash", });
return null;
}
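
Any lookup keyed by a message's full name must now include the package, whereas FileDescriptor.findMessageTypeByName() still takes the unqualified name. A sketch, assuming the regenerated EncryptionProtos is on the classpath:

    import com.google.protobuf.Descriptors;
    import org.apache.hadoop.hbase.protobuf.generated.EncryptionProtos;

    public class WrappedKeyLookup {
      public static void main(String[] args) {
        Descriptors.Descriptor d =
            EncryptionProtos.getDescriptor().findMessageTypeByName("WrappedKey");
        // Prints "hbase.pb.WrappedKey" after this change.
        System.out.println(d.getFullName());
      }
    }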


@ -67,7 +67,7 @@ public final class ErrorHandlingProtos {
int getLineNumber();
}
/**
* Protobuf type {@code StackTraceElementMessage}
* Protobuf type {@code hbase.pb.StackTraceElementMessage}
*
* <pre>
**
@ -157,12 +157,12 @@ public final class ErrorHandlingProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_StackTraceElementMessage_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_StackTraceElementMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder.class);
}
@ -527,7 +527,7 @@ public final class ErrorHandlingProtos {
return builder;
}
/**
* Protobuf type {@code StackTraceElementMessage}
* Protobuf type {@code hbase.pb.StackTraceElementMessage}
*
* <pre>
**
@ -540,12 +540,12 @@ public final class ErrorHandlingProtos {
implements org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_StackTraceElementMessage_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_StackTraceElementMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder.class);
}
@ -587,7 +587,7 @@ public final class ErrorHandlingProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_StackTraceElementMessage_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_StackTraceElementMessage_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage getDefaultInstanceForType() {
@ -938,7 +938,7 @@ public final class ErrorHandlingProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:StackTraceElementMessage)
// @@protoc_insertion_point(builder_scope:hbase.pb.StackTraceElementMessage)
}
static {
@ -946,7 +946,7 @@ public final class ErrorHandlingProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:StackTraceElementMessage)
// @@protoc_insertion_point(class_scope:hbase.pb.StackTraceElementMessage)
}
public interface GenericExceptionMessageOrBuilder
@ -992,33 +992,33 @@ public final class ErrorHandlingProtos {
*/
com.google.protobuf.ByteString getErrorInfo();
// repeated .StackTraceElementMessage trace = 4;
// repeated .hbase.pb.StackTraceElementMessage trace = 4;
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage>
getTraceList();
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage getTrace(int index);
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
int getTraceCount();
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder>
getTraceOrBuilderList();
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder getTraceOrBuilder(
int index);
}
/**
* Protobuf type {@code GenericExceptionMessage}
* Protobuf type {@code hbase.pb.GenericExceptionMessage}
*
* <pre>
**
@ -1116,12 +1116,12 @@ public final class ErrorHandlingProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_GenericExceptionMessage_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_GenericExceptionMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder.class);
}
@ -1244,36 +1244,36 @@ public final class ErrorHandlingProtos {
return errorInfo_;
}
// repeated .StackTraceElementMessage trace = 4;
// repeated .hbase.pb.StackTraceElementMessage trace = 4;
public static final int TRACE_FIELD_NUMBER = 4;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage> trace_;
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage> getTraceList() {
return trace_;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder>
getTraceOrBuilderList() {
return trace_;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public int getTraceCount() {
return trace_.size();
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage getTrace(int index) {
return trace_.get(index);
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder getTraceOrBuilder(
int index) {
@ -1476,7 +1476,7 @@ public final class ErrorHandlingProtos {
return builder;
}
/**
* Protobuf type {@code GenericExceptionMessage}
* Protobuf type {@code hbase.pb.GenericExceptionMessage}
*
* <pre>
**
@ -1491,12 +1491,12 @@ public final class ErrorHandlingProtos {
implements org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_GenericExceptionMessage_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_GenericExceptionMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder.class);
}
@ -1543,7 +1543,7 @@ public final class ErrorHandlingProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_GenericExceptionMessage_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_GenericExceptionMessage_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getDefaultInstanceForType() {
@ -1849,7 +1849,7 @@ public final class ErrorHandlingProtos {
return this;
}
// repeated .StackTraceElementMessage trace = 4;
// repeated .hbase.pb.StackTraceElementMessage trace = 4;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage> trace_ =
java.util.Collections.emptyList();
private void ensureTraceIsMutable() {
@ -1863,7 +1863,7 @@ public final class ErrorHandlingProtos {
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder> traceBuilder_;
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage> getTraceList() {
if (traceBuilder_ == null) {
@ -1873,7 +1873,7 @@ public final class ErrorHandlingProtos {
}
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public int getTraceCount() {
if (traceBuilder_ == null) {
@ -1883,7 +1883,7 @@ public final class ErrorHandlingProtos {
}
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage getTrace(int index) {
if (traceBuilder_ == null) {
@ -1893,7 +1893,7 @@ public final class ErrorHandlingProtos {
}
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public Builder setTrace(
int index, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage value) {
@ -1910,7 +1910,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public Builder setTrace(
int index, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder builderForValue) {
@ -1924,7 +1924,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public Builder addTrace(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage value) {
if (traceBuilder_ == null) {
@ -1940,7 +1940,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public Builder addTrace(
int index, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage value) {
@ -1957,7 +1957,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public Builder addTrace(
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder builderForValue) {
@ -1971,7 +1971,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public Builder addTrace(
int index, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder builderForValue) {
@ -1985,7 +1985,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public Builder addAllTrace(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage> values) {
@ -1999,7 +1999,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public Builder clearTrace() {
if (traceBuilder_ == null) {
@ -2012,7 +2012,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public Builder removeTrace(int index) {
if (traceBuilder_ == null) {
@ -2025,14 +2025,14 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder getTraceBuilder(
int index) {
return getTraceFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder getTraceOrBuilder(
int index) {
@ -2042,7 +2042,7 @@ public final class ErrorHandlingProtos {
}
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessageOrBuilder>
getTraceOrBuilderList() {
@ -2053,14 +2053,14 @@ public final class ErrorHandlingProtos {
}
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder addTraceBuilder() {
return getTraceFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.getDefaultInstance());
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder addTraceBuilder(
int index) {
@ -2068,7 +2068,7 @@ public final class ErrorHandlingProtos {
index, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.getDefaultInstance());
}
/**
* <code>repeated .StackTraceElementMessage trace = 4;</code>
* <code>repeated .hbase.pb.StackTraceElementMessage trace = 4;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.StackTraceElementMessage.Builder>
getTraceBuilderList() {
@ -2089,7 +2089,7 @@ public final class ErrorHandlingProtos {
return traceBuilder_;
}
// @@protoc_insertion_point(builder_scope:GenericExceptionMessage)
// @@protoc_insertion_point(builder_scope:hbase.pb.GenericExceptionMessage)
}
static {
@ -2097,7 +2097,7 @@ public final class ErrorHandlingProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:GenericExceptionMessage)
// @@protoc_insertion_point(class_scope:hbase.pb.GenericExceptionMessage)
}
public interface ForeignExceptionMessageOrBuilder
@ -2118,22 +2118,22 @@ public final class ErrorHandlingProtos {
com.google.protobuf.ByteString
getSourceBytes();
// optional .GenericExceptionMessage generic_exception = 2;
// optional .hbase.pb.GenericExceptionMessage generic_exception = 2;
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
boolean hasGenericException();
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getGenericException();
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder getGenericExceptionOrBuilder();
}
/**
* Protobuf type {@code ForeignExceptionMessage}
* Protobuf type {@code hbase.pb.ForeignExceptionMessage}
*
* <pre>
**
@ -2221,12 +2221,12 @@ public final class ErrorHandlingProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_ForeignExceptionMessage_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_ForeignExceptionMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder.class);
}
@ -2290,23 +2290,23 @@ public final class ErrorHandlingProtos {
}
}
// optional .GenericExceptionMessage generic_exception = 2;
// optional .hbase.pb.GenericExceptionMessage generic_exception = 2;
public static final int GENERIC_EXCEPTION_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage genericException_;
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public boolean hasGenericException() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getGenericException() {
return genericException_;
}
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder getGenericExceptionOrBuilder() {
return genericException_;
@ -2477,7 +2477,7 @@ public final class ErrorHandlingProtos {
return builder;
}
/**
* Protobuf type {@code ForeignExceptionMessage}
* Protobuf type {@code hbase.pb.ForeignExceptionMessage}
*
* <pre>
**
@ -2490,12 +2490,12 @@ public final class ErrorHandlingProtos {
implements org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessageOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_ForeignExceptionMessage_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_ForeignExceptionMessage_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.class, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage.Builder.class);
}
@ -2538,7 +2538,7 @@ public final class ErrorHandlingProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_ForeignExceptionMessage_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.internal_static_hbase_pb_ForeignExceptionMessage_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.ForeignExceptionMessage getDefaultInstanceForType() {
@ -2694,18 +2694,18 @@ public final class ErrorHandlingProtos {
return this;
}
// optional .GenericExceptionMessage generic_exception = 2;
// optional .hbase.pb.GenericExceptionMessage generic_exception = 2;
private org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage genericException_ = org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder> genericExceptionBuilder_;
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public boolean hasGenericException() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage getGenericException() {
if (genericExceptionBuilder_ == null) {
@ -2715,7 +2715,7 @@ public final class ErrorHandlingProtos {
}
}
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public Builder setGenericException(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage value) {
if (genericExceptionBuilder_ == null) {
@ -2731,7 +2731,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public Builder setGenericException(
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder builderForValue) {
@ -2745,7 +2745,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public Builder mergeGenericException(org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage value) {
if (genericExceptionBuilder_ == null) {
@ -2764,7 +2764,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public Builder clearGenericException() {
if (genericExceptionBuilder_ == null) {
@ -2777,7 +2777,7 @@ public final class ErrorHandlingProtos {
return this;
}
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder getGenericExceptionBuilder() {
bitField0_ |= 0x00000002;
@ -2785,7 +2785,7 @@ public final class ErrorHandlingProtos {
return getGenericExceptionFieldBuilder().getBuilder();
}
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder getGenericExceptionOrBuilder() {
if (genericExceptionBuilder_ != null) {
@ -2795,7 +2795,7 @@ public final class ErrorHandlingProtos {
}
}
/**
* <code>optional .GenericExceptionMessage generic_exception = 2;</code>
* <code>optional .hbase.pb.GenericExceptionMessage generic_exception = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessage.Builder, org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos.GenericExceptionMessageOrBuilder>
@ -2811,7 +2811,7 @@ public final class ErrorHandlingProtos {
return genericExceptionBuilder_;
}
// @@protoc_insertion_point(builder_scope:ForeignExceptionMessage)
// @@protoc_insertion_point(builder_scope:hbase.pb.ForeignExceptionMessage)
}
static {
@ -2819,24 +2819,24 @@ public final class ErrorHandlingProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:ForeignExceptionMessage)
// @@protoc_insertion_point(class_scope:hbase.pb.ForeignExceptionMessage)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_StackTraceElementMessage_descriptor;
internal_static_hbase_pb_StackTraceElementMessage_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_StackTraceElementMessage_fieldAccessorTable;
internal_static_hbase_pb_StackTraceElementMessage_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_GenericExceptionMessage_descriptor;
internal_static_hbase_pb_GenericExceptionMessage_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_GenericExceptionMessage_fieldAccessorTable;
internal_static_hbase_pb_GenericExceptionMessage_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ForeignExceptionMessage_descriptor;
internal_static_hbase_pb_ForeignExceptionMessage_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ForeignExceptionMessage_fieldAccessorTable;
internal_static_hbase_pb_ForeignExceptionMessage_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -2846,40 +2846,41 @@ public final class ErrorHandlingProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\023ErrorHandling.proto\"p\n\030StackTraceEleme" +
"ntMessage\022\027\n\017declaring_class\030\001 \001(\t\022\023\n\013me" +
"thod_name\030\002 \001(\t\022\021\n\tfile_name\030\003 \001(\t\022\023\n\013li" +
"ne_number\030\004 \001(\005\"|\n\027GenericExceptionMessa" +
"ge\022\022\n\nclass_name\030\001 \001(\t\022\017\n\007message\030\002 \001(\t\022" +
"\022\n\nerror_info\030\003 \001(\014\022(\n\005trace\030\004 \003(\0132\031.Sta" +
"ckTraceElementMessage\"^\n\027ForeignExceptio" +
"nMessage\022\016\n\006source\030\001 \001(\t\0223\n\021generic_exce" +
"ption\030\002 \001(\0132\030.GenericExceptionMessageBF\n" +
"*org.apache.hadoop.hbase.protobuf.genera",
"tedB\023ErrorHandlingProtosH\001\240\001\001"
"\n\023ErrorHandling.proto\022\010hbase.pb\"p\n\030Stack" +
"TraceElementMessage\022\027\n\017declaring_class\030\001" +
" \001(\t\022\023\n\013method_name\030\002 \001(\t\022\021\n\tfile_name\030\003" +
" \001(\t\022\023\n\013line_number\030\004 \001(\005\"\205\001\n\027GenericExc" +
"eptionMessage\022\022\n\nclass_name\030\001 \001(\t\022\017\n\007mes" +
"sage\030\002 \001(\t\022\022\n\nerror_info\030\003 \001(\014\0221\n\005trace\030" +
"\004 \003(\0132\".hbase.pb.StackTraceElementMessag" +
"e\"g\n\027ForeignExceptionMessage\022\016\n\006source\030\001" +
" \001(\t\022<\n\021generic_exception\030\002 \001(\0132!.hbase." +
"pb.GenericExceptionMessageBF\n*org.apache",
".hadoop.hbase.protobuf.generatedB\023ErrorH" +
"andlingProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_StackTraceElementMessage_descriptor =
internal_static_hbase_pb_StackTraceElementMessage_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_StackTraceElementMessage_fieldAccessorTable = new
internal_static_hbase_pb_StackTraceElementMessage_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_StackTraceElementMessage_descriptor,
internal_static_hbase_pb_StackTraceElementMessage_descriptor,
new java.lang.String[] { "DeclaringClass", "MethodName", "FileName", "LineNumber", });
internal_static_GenericExceptionMessage_descriptor =
internal_static_hbase_pb_GenericExceptionMessage_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_GenericExceptionMessage_fieldAccessorTable = new
internal_static_hbase_pb_GenericExceptionMessage_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_GenericExceptionMessage_descriptor,
internal_static_hbase_pb_GenericExceptionMessage_descriptor,
new java.lang.String[] { "ClassName", "Message", "ErrorInfo", "Trace", });
internal_static_ForeignExceptionMessage_descriptor =
internal_static_hbase_pb_ForeignExceptionMessage_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_ForeignExceptionMessage_fieldAccessorTable = new
internal_static_hbase_pb_ForeignExceptionMessage_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ForeignExceptionMessage_descriptor,
internal_static_hbase_pb_ForeignExceptionMessage_descriptor,
new java.lang.String[] { "Source", "GenericException", });
return null;
}
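
As the updated descriptor string above shows, message-typed fields now reference their types by package-qualified name (".hbase.pb.StackTraceElementMessage", ".hbase.pb.GenericExceptionMessage"). A sketch of how that surfaces through the descriptor API, assuming the regenerated ErrorHandlingProtos is on the classpath:

    import org.apache.hadoop.hbase.protobuf.generated.ErrorHandlingProtos;

    public class TraceFieldType {
      public static void main(String[] args) {
        // Prints "hbase.pb.StackTraceElementMessage".
        System.out.println(
            ErrorHandlingProtos.GenericExceptionMessage.getDescriptor()
                .findFieldByName("trace").getMessageType().getFullName());
      }
    }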


@ -27,7 +27,7 @@ public final class FSProtos {
getVersionBytes();
}
/**
* Protobuf type {@code HBaseVersionFileContent}
* Protobuf type {@code hbase.pb.HBaseVersionFileContent}
*
* <pre>
**
@ -101,12 +101,12 @@ public final class FSProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
}
@ -322,7 +322,7 @@ public final class FSProtos {
return builder;
}
/**
* Protobuf type {@code HBaseVersionFileContent}
* Protobuf type {@code hbase.pb.HBaseVersionFileContent}
*
* <pre>
**
@ -334,12 +334,12 @@ public final class FSProtos {
implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
}
@ -375,7 +375,7 @@ public final class FSProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_hbase_pb_HBaseVersionFileContent_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getDefaultInstanceForType() {
@ -524,7 +524,7 @@ public final class FSProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:HBaseVersionFileContent)
// @@protoc_insertion_point(builder_scope:hbase.pb.HBaseVersionFileContent)
}
static {
@ -532,7 +532,7 @@ public final class FSProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:HBaseVersionFileContent)
// @@protoc_insertion_point(class_scope:hbase.pb.HBaseVersionFileContent)
}
public interface ReferenceOrBuilder
@ -548,18 +548,18 @@ public final class FSProtos {
*/
com.google.protobuf.ByteString getSplitkey();
// required .Reference.Range range = 2;
// required .hbase.pb.Reference.Range range = 2;
/**
* <code>required .Reference.Range range = 2;</code>
* <code>required .hbase.pb.Reference.Range range = 2;</code>
*/
boolean hasRange();
/**
* <code>required .Reference.Range range = 2;</code>
* <code>required .hbase.pb.Reference.Range range = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange();
}
/**
* Protobuf type {@code Reference}
* Protobuf type {@code hbase.pb.Reference}
*
* <pre>
**
@ -644,12 +644,12 @@ public final class FSProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_hbase_pb_Reference_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_hbase_pb_Reference_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class);
}
@ -670,7 +670,7 @@ public final class FSProtos {
}
/**
* Protobuf enum {@code Reference.Range}
* Protobuf enum {@code hbase.pb.Reference.Range}
*/
public enum Range
implements com.google.protobuf.ProtocolMessageEnum {
@ -748,7 +748,7 @@ public final class FSProtos {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:Reference.Range)
// @@protoc_insertion_point(enum_scope:hbase.pb.Reference.Range)
}
private int bitField0_;
@ -768,17 +768,17 @@ public final class FSProtos {
return splitkey_;
}
// required .Reference.Range range = 2;
// required .hbase.pb.Reference.Range range = 2;
public static final int RANGE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_;
/**
* <code>required .Reference.Range range = 2;</code>
* <code>required .hbase.pb.Reference.Range range = 2;</code>
*/
public boolean hasRange() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .Reference.Range range = 2;</code>
* <code>required .hbase.pb.Reference.Range range = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() {
return range_;
@ -957,7 +957,7 @@ public final class FSProtos {
return builder;
}
/**
* Protobuf type {@code Reference}
* Protobuf type {@code hbase.pb.Reference}
*
* <pre>
**
@ -969,12 +969,12 @@ public final class FSProtos {
implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_hbase_pb_Reference_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_hbase_pb_Reference_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class);
}
@ -1012,7 +1012,7 @@ public final class FSProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_hbase_pb_Reference_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getDefaultInstanceForType() {
@ -1132,22 +1132,22 @@ public final class FSProtos {
return this;
}
// required .Reference.Range range = 2;
// required .hbase.pb.Reference.Range range = 2;
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP;
/**
* <code>required .Reference.Range range = 2;</code>
* <code>required .hbase.pb.Reference.Range range = 2;</code>
*/
public boolean hasRange() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .Reference.Range range = 2;</code>
* <code>required .hbase.pb.Reference.Range range = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() {
return range_;
}
/**
* <code>required .Reference.Range range = 2;</code>
* <code>required .hbase.pb.Reference.Range range = 2;</code>
*/
public Builder setRange(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value) {
if (value == null) {
@ -1159,7 +1159,7 @@ public final class FSProtos {
return this;
}
/**
* <code>required .Reference.Range range = 2;</code>
* <code>required .hbase.pb.Reference.Range range = 2;</code>
*/
public Builder clearRange() {
bitField0_ = (bitField0_ & ~0x00000002);
@ -1168,7 +1168,7 @@ public final class FSProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:Reference)
// @@protoc_insertion_point(builder_scope:hbase.pb.Reference)
}
static {
@ -1176,19 +1176,19 @@ public final class FSProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:Reference)
// @@protoc_insertion_point(class_scope:hbase.pb.Reference)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_HBaseVersionFileContent_descriptor;
internal_static_hbase_pb_HBaseVersionFileContent_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_HBaseVersionFileContent_fieldAccessorTable;
internal_static_hbase_pb_HBaseVersionFileContent_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_Reference_descriptor;
internal_static_hbase_pb_Reference_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_Reference_fieldAccessorTable;
internal_static_hbase_pb_Reference_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -1198,29 +1198,29 @@ public final class FSProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\010FS.proto\"*\n\027HBaseVersionFileContent\022\017\n" +
"\007version\030\001 \002(\t\"\\\n\tReference\022\020\n\010splitkey\030" +
"\001 \002(\014\022\037\n\005range\030\002 \002(\0162\020.Reference.Range\"\034" +
"\n\005Range\022\007\n\003TOP\020\000\022\n\n\006BOTTOM\020\001B;\n*org.apac" +
"he.hadoop.hbase.protobuf.generatedB\010FSPr" +
"otosH\001\240\001\001"
"\n\010FS.proto\022\010hbase.pb\"*\n\027HBaseVersionFile" +
"Content\022\017\n\007version\030\001 \002(\t\"e\n\tReference\022\020\n" +
"\010splitkey\030\001 \002(\014\022(\n\005range\030\002 \002(\0162\031.hbase.p" +
"b.Reference.Range\"\034\n\005Range\022\007\n\003TOP\020\000\022\n\n\006B" +
"OTTOM\020\001B;\n*org.apache.hadoop.hbase.proto" +
"buf.generatedB\010FSProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_HBaseVersionFileContent_descriptor =
internal_static_hbase_pb_HBaseVersionFileContent_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_HBaseVersionFileContent_fieldAccessorTable = new
internal_static_hbase_pb_HBaseVersionFileContent_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_HBaseVersionFileContent_descriptor,
internal_static_hbase_pb_HBaseVersionFileContent_descriptor,
new java.lang.String[] { "Version", });
internal_static_Reference_descriptor =
internal_static_hbase_pb_Reference_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_Reference_fieldAccessorTable = new
internal_static_hbase_pb_Reference_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_Reference_descriptor,
internal_static_hbase_pb_Reference_descriptor,
new java.lang.String[] { "Splitkey", "Range", });
return null;
}
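
Nested enums pick up the prefix as well: the Reference.Range insertion points and descriptor string above now read hbase.pb.Reference.Range. A sketch, assuming the regenerated FSProtos is on the classpath; the generated Java enum constants themselves (TOP, BOTTOM) are unchanged:

    import org.apache.hadoop.hbase.protobuf.generated.FSProtos;

    public class RangeName {
      public static void main(String[] args) {
        // Prints "hbase.pb.Reference.Range".
        System.out.println(FSProtos.Reference.Range.getDescriptor().getFullName());
        // Generated Java constants are untouched by the proto package.
        System.out.println(FSProtos.Reference.Range.TOP);
      }
    }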


@ -11,33 +11,33 @@ public final class HFileProtos {
public interface FileInfoProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .BytesBytesPair map_entry = 1;
// repeated .hbase.pb.BytesBytesPair map_entry = 1;
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>
getMapEntryList();
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index);
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
int getMapEntryCount();
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
getMapEntryOrBuilderList();
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder(
int index);
}
/**
* Protobuf type {@code FileInfoProto}
* Protobuf type {@code hbase.pb.FileInfoProto}
*
* <pre>
* Map of name/values
@ -116,12 +116,12 @@ public final class HFileProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.class, org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.Builder.class);
}
@ -141,36 +141,36 @@ public final class HFileProtos {
return PARSER;
}
// repeated .BytesBytesPair map_entry = 1;
// repeated .hbase.pb.BytesBytesPair map_entry = 1;
public static final int MAP_ENTRY_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> mapEntry_;
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getMapEntryList() {
return mapEntry_;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
getMapEntryOrBuilderList() {
return mapEntry_;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public int getMapEntryCount() {
return mapEntry_.size();
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index) {
return mapEntry_.get(index);
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder(
int index) {
@ -328,7 +328,7 @@ public final class HFileProtos {
return builder;
}
/**
* Protobuf type {@code FileInfoProto}
* Protobuf type {@code hbase.pb.FileInfoProto}
*
* <pre>
* Map of name/values
@ -339,12 +339,12 @@ public final class HFileProtos {
implements org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.class, org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto.Builder.class);
}
@ -385,7 +385,7 @@ public final class HFileProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileInfoProto_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileInfoProto_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileInfoProto getDefaultInstanceForType() {
@ -486,7 +486,7 @@ public final class HFileProtos {
}
private int bitField0_;
// repeated .BytesBytesPair map_entry = 1;
// repeated .hbase.pb.BytesBytesPair map_entry = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> mapEntry_ =
java.util.Collections.emptyList();
private void ensureMapEntryIsMutable() {
@ -500,7 +500,7 @@ public final class HFileProtos {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> mapEntryBuilder_;
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getMapEntryList() {
if (mapEntryBuilder_ == null) {
@ -510,7 +510,7 @@ public final class HFileProtos {
}
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public int getMapEntryCount() {
if (mapEntryBuilder_ == null) {
@ -520,7 +520,7 @@ public final class HFileProtos {
}
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getMapEntry(int index) {
if (mapEntryBuilder_ == null) {
@ -530,7 +530,7 @@ public final class HFileProtos {
}
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public Builder setMapEntry(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
@ -547,7 +547,7 @@ public final class HFileProtos {
return this;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public Builder setMapEntry(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
@ -561,7 +561,7 @@ public final class HFileProtos {
return this;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public Builder addMapEntry(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
if (mapEntryBuilder_ == null) {
@ -577,7 +577,7 @@ public final class HFileProtos {
return this;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public Builder addMapEntry(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
@ -594,7 +594,7 @@ public final class HFileProtos {
return this;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public Builder addMapEntry(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
@ -608,7 +608,7 @@ public final class HFileProtos {
return this;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public Builder addMapEntry(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
@ -622,7 +622,7 @@ public final class HFileProtos {
return this;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public Builder addAllMapEntry(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) {
@ -636,7 +636,7 @@ public final class HFileProtos {
return this;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public Builder clearMapEntry() {
if (mapEntryBuilder_ == null) {
@ -649,7 +649,7 @@ public final class HFileProtos {
return this;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public Builder removeMapEntry(int index) {
if (mapEntryBuilder_ == null) {
@ -662,14 +662,14 @@ public final class HFileProtos {
return this;
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getMapEntryBuilder(
int index) {
return getMapEntryFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getMapEntryOrBuilder(
int index) {
@ -679,7 +679,7 @@ public final class HFileProtos {
}
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
getMapEntryOrBuilderList() {
@ -690,14 +690,14 @@ public final class HFileProtos {
}
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addMapEntryBuilder() {
return getMapEntryFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addMapEntryBuilder(
int index) {
@ -705,7 +705,7 @@ public final class HFileProtos {
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
}
/**
* <code>repeated .BytesBytesPair map_entry = 1;</code>
* <code>repeated .hbase.pb.BytesBytesPair map_entry = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder>
getMapEntryBuilderList() {
@ -726,7 +726,7 @@ public final class HFileProtos {
return mapEntryBuilder_;
}
// @@protoc_insertion_point(builder_scope:FileInfoProto)
// @@protoc_insertion_point(builder_scope:hbase.pb.FileInfoProto)
}
static {
@ -734,7 +734,7 @@ public final class HFileProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:FileInfoProto)
// @@protoc_insertion_point(class_scope:hbase.pb.FileInfoProto)
}
public interface FileTrailerProtoOrBuilder
@ -876,7 +876,7 @@ public final class HFileProtos {
com.google.protobuf.ByteString getEncryptionKey();
}
/**
* Protobuf type {@code FileTrailerProto}
* Protobuf type {@code hbase.pb.FileTrailerProto}
*
* <pre>
* HFile file trailer
@ -1009,12 +1009,12 @@ public final class HFileProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.class, org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class);
}
@ -1622,7 +1622,7 @@ public final class HFileProtos {
return builder;
}
/**
* Protobuf type {@code FileTrailerProto}
* Protobuf type {@code hbase.pb.FileTrailerProto}
*
* <pre>
* HFile file trailer
@ -1633,12 +1633,12 @@ public final class HFileProtos {
implements org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.class, org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto.Builder.class);
}
@ -1698,7 +1698,7 @@ public final class HFileProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_FileTrailerProto_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.HFileProtos.internal_static_hbase_pb_FileTrailerProto_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.HFileProtos.FileTrailerProto getDefaultInstanceForType() {
@ -2326,7 +2326,7 @@ public final class HFileProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:FileTrailerProto)
// @@protoc_insertion_point(builder_scope:hbase.pb.FileTrailerProto)
}
static {
@ -2334,19 +2334,19 @@ public final class HFileProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:FileTrailerProto)
// @@protoc_insertion_point(class_scope:hbase.pb.FileTrailerProto)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_FileInfoProto_descriptor;
internal_static_hbase_pb_FileInfoProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_FileInfoProto_fieldAccessorTable;
internal_static_hbase_pb_FileInfoProto_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_FileTrailerProto_descriptor;
internal_static_hbase_pb_FileTrailerProto_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_FileTrailerProto_fieldAccessorTable;
internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -2356,37 +2356,38 @@ public final class HFileProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\013HFile.proto\032\013HBase.proto\"3\n\rFileInfoPr" +
"oto\022\"\n\tmap_entry\030\001 \003(\0132\017.BytesBytesPair\"" +
"\221\003\n\020FileTrailerProto\022\030\n\020file_info_offset" +
"\030\001 \001(\004\022 \n\030load_on_open_data_offset\030\002 \001(\004" +
"\022$\n\034uncompressed_data_index_size\030\003 \001(\004\022 " +
"\n\030total_uncompressed_bytes\030\004 \001(\004\022\030\n\020data" +
"_index_count\030\005 \001(\r\022\030\n\020meta_index_count\030\006" +
" \001(\r\022\023\n\013entry_count\030\007 \001(\004\022\035\n\025num_data_in" +
"dex_levels\030\010 \001(\r\022\037\n\027first_data_block_off" +
"set\030\t \001(\004\022\036\n\026last_data_block_offset\030\n \001(",
"\004\022\035\n\025comparator_class_name\030\013 \001(\t\022\031\n\021comp" +
"ression_codec\030\014 \001(\r\022\026\n\016encryption_key\030\r " +
"\001(\014BA\n*org.apache.hadoop.hbase.protobuf." +
"generatedB\013HFileProtosH\001\210\001\001\240\001\001"
"\n\013HFile.proto\022\010hbase.pb\032\013HBase.proto\"<\n\r" +
"FileInfoProto\022+\n\tmap_entry\030\001 \003(\0132\030.hbase" +
".pb.BytesBytesPair\"\221\003\n\020FileTrailerProto\022" +
"\030\n\020file_info_offset\030\001 \001(\004\022 \n\030load_on_ope" +
"n_data_offset\030\002 \001(\004\022$\n\034uncompressed_data" +
"_index_size\030\003 \001(\004\022 \n\030total_uncompressed_" +
"bytes\030\004 \001(\004\022\030\n\020data_index_count\030\005 \001(\r\022\030\n" +
"\020meta_index_count\030\006 \001(\r\022\023\n\013entry_count\030\007" +
" \001(\004\022\035\n\025num_data_index_levels\030\010 \001(\r\022\037\n\027f" +
"irst_data_block_offset\030\t \001(\004\022\036\n\026last_dat",
"a_block_offset\030\n \001(\004\022\035\n\025comparator_class" +
"_name\030\013 \001(\t\022\031\n\021compression_codec\030\014 \001(\r\022\026" +
"\n\016encryption_key\030\r \001(\014BA\n*org.apache.had" +
"oop.hbase.protobuf.generatedB\013HFileProto" +
"sH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_FileInfoProto_descriptor =
internal_static_hbase_pb_FileInfoProto_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_FileInfoProto_fieldAccessorTable = new
internal_static_hbase_pb_FileInfoProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_FileInfoProto_descriptor,
internal_static_hbase_pb_FileInfoProto_descriptor,
new java.lang.String[] { "MapEntry", });
internal_static_FileTrailerProto_descriptor =
internal_static_hbase_pb_FileTrailerProto_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_FileTrailerProto_fieldAccessorTable = new
internal_static_hbase_pb_FileTrailerProto_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_FileTrailerProto_descriptor,
internal_static_hbase_pb_FileTrailerProto_descriptor,
new java.lang.String[] { "FileInfoOffset", "LoadOnOpenDataOffset", "UncompressedDataIndexSize", "TotalUncompressedBytes", "DataIndexCount", "MetaIndexCount", "EntryCount", "NumDataIndexLevels", "FirstDataBlockOffset", "LastDataBlockOffset", "ComparatorClassName", "CompressionCodec", "EncryptionKey", });
return null;
}
@ -22,7 +22,7 @@ public final class LoadBalancerProtos {
boolean getBalancerOn();
}
/**
* Protobuf type {@code LoadBalancerState}
* Protobuf type {@code hbase.pb.LoadBalancerState}
*/
public static final class LoadBalancerState extends
com.google.protobuf.GeneratedMessage
@ -91,12 +91,12 @@ public final class LoadBalancerProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_hbase_pb_LoadBalancerState_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_hbase_pb_LoadBalancerState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
}
@ -281,19 +281,19 @@ public final class LoadBalancerProtos {
return builder;
}
/**
* Protobuf type {@code LoadBalancerState}
* Protobuf type {@code hbase.pb.LoadBalancerState}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerStateOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_hbase_pb_LoadBalancerState_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_hbase_pb_LoadBalancerState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
}
@ -329,7 +329,7 @@ public final class LoadBalancerProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_hbase_pb_LoadBalancerState_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState getDefaultInstanceForType() {
@ -431,7 +431,7 @@ public final class LoadBalancerProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:LoadBalancerState)
// @@protoc_insertion_point(builder_scope:hbase.pb.LoadBalancerState)
}
static {
@ -439,14 +439,14 @@ public final class LoadBalancerProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:LoadBalancerState)
// @@protoc_insertion_point(class_scope:hbase.pb.LoadBalancerState)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_LoadBalancerState_descriptor;
internal_static_hbase_pb_LoadBalancerState_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_LoadBalancerState_fieldAccessorTable;
internal_static_hbase_pb_LoadBalancerState_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -456,21 +456,21 @@ public final class LoadBalancerProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\022LoadBalancer.proto\"(\n\021LoadBalancerStat" +
"e\022\023\n\013balancer_on\030\001 \001(\010BE\n*org.apache.had" +
"oop.hbase.protobuf.generatedB\022LoadBalanc" +
"erProtosH\001\240\001\001"
"\n\022LoadBalancer.proto\022\010hbase.pb\"(\n\021LoadBa" +
"lancerState\022\023\n\013balancer_on\030\001 \001(\010BE\n*org." +
"apache.hadoop.hbase.protobuf.generatedB\022" +
"LoadBalancerProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_LoadBalancerState_descriptor =
internal_static_hbase_pb_LoadBalancerState_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_LoadBalancerState_fieldAccessorTable = new
internal_static_hbase_pb_LoadBalancerState_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_LoadBalancerState_descriptor,
internal_static_hbase_pb_LoadBalancerState_descriptor,
new java.lang.String[] { "BalancerOn", });
return null;
}
@ -11,33 +11,33 @@ public final class MapReduceProtos {
public interface ScanMetricsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .NameInt64Pair metrics = 1;
// repeated .hbase.pb.NameInt64Pair metrics = 1;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>
getMetricsList();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index);
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
int getMetricsCount();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index);
}
/**
* Protobuf type {@code ScanMetrics}
* Protobuf type {@code hbase.pb.ScanMetrics}
*/
public static final class ScanMetrics extends
com.google.protobuf.GeneratedMessage
@ -112,12 +112,12 @@ public final class MapReduceProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
}
@ -137,36 +137,36 @@ public final class MapReduceProtos {
return PARSER;
}
// repeated .NameInt64Pair metrics = 1;
// repeated .hbase.pb.NameInt64Pair metrics = 1;
public static final int METRICS_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> metrics_;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
return metrics_;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList() {
return metrics_;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public int getMetricsCount() {
return metrics_.size();
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
return metrics_.get(index);
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index) {
@ -318,19 +318,19 @@ public final class MapReduceProtos {
return builder;
}
/**
* Protobuf type {@code ScanMetrics}
* Protobuf type {@code hbase.pb.ScanMetrics}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
}
@ -371,7 +371,7 @@ public final class MapReduceProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_ScanMetrics_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getDefaultInstanceForType() {
@ -466,7 +466,7 @@ public final class MapReduceProtos {
}
private int bitField0_;
// repeated .NameInt64Pair metrics = 1;
// repeated .hbase.pb.NameInt64Pair metrics = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> metrics_ =
java.util.Collections.emptyList();
private void ensureMetricsIsMutable() {
@ -480,7 +480,7 @@ public final class MapReduceProtos {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder> metricsBuilder_;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
if (metricsBuilder_ == null) {
@ -490,7 +490,7 @@ public final class MapReduceProtos {
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public int getMetricsCount() {
if (metricsBuilder_ == null) {
@ -500,7 +500,7 @@ public final class MapReduceProtos {
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
if (metricsBuilder_ == null) {
@ -510,7 +510,7 @@ public final class MapReduceProtos {
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public Builder setMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
@ -527,7 +527,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public Builder setMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
@ -541,7 +541,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
if (metricsBuilder_ == null) {
@ -557,7 +557,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
@ -574,7 +574,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
@ -588,7 +588,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
@ -602,7 +602,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public Builder addAllMetrics(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> values) {
@ -616,7 +616,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public Builder clearMetrics() {
if (metricsBuilder_ == null) {
@ -629,7 +629,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public Builder removeMetrics(int index) {
if (metricsBuilder_ == null) {
@ -642,14 +642,14 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder getMetricsBuilder(
int index) {
return getMetricsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index) {
@ -659,7 +659,7 @@ public final class MapReduceProtos {
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList() {
@ -670,14 +670,14 @@ public final class MapReduceProtos {
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder() {
return getMetricsFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder(
int index) {
@ -685,7 +685,7 @@ public final class MapReduceProtos {
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
* <code>repeated .hbase.pb.NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder>
getMetricsBuilderList() {
@ -706,7 +706,7 @@ public final class MapReduceProtos {
return metricsBuilder_;
}
// @@protoc_insertion_point(builder_scope:ScanMetrics)
// @@protoc_insertion_point(builder_scope:hbase.pb.ScanMetrics)
}
static {
@ -714,7 +714,7 @@ public final class MapReduceProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:ScanMetrics)
// @@protoc_insertion_point(class_scope:hbase.pb.ScanMetrics)
}
public interface TableSnapshotRegionSplitOrBuilder
@ -740,36 +740,36 @@ public final class MapReduceProtos {
com.google.protobuf.ByteString
getLocationsBytes(int index);
// optional .TableSchema table = 3;
// optional .hbase.pb.TableSchema table = 3;
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
boolean hasTable();
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTable();
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableOrBuilder();
// optional .RegionInfo region = 4;
// optional .hbase.pb.RegionInfo region = 4;
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
boolean hasRegion();
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion();
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder();
}
/**
* Protobuf type {@code TableSnapshotRegionSplit}
* Protobuf type {@code hbase.pb.TableSnapshotRegionSplit}
*/
public static final class TableSnapshotRegionSplit extends
com.google.protobuf.GeneratedMessage
@ -870,12 +870,12 @@ public final class MapReduceProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_TableSnapshotRegionSplit_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_TableSnapshotRegionSplit_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_TableSnapshotRegionSplit_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit.Builder.class);
}
@ -926,45 +926,45 @@ public final class MapReduceProtos {
return locations_.getByteString(index);
}
// optional .TableSchema table = 3;
// optional .hbase.pb.TableSchema table = 3;
public static final int TABLE_FIELD_NUMBER = 3;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema table_;
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public boolean hasTable() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTable() {
return table_;
}
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableOrBuilder() {
return table_;
}
// optional .RegionInfo region = 4;
// optional .hbase.pb.RegionInfo region = 4;
public static final int REGION_FIELD_NUMBER = 4;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_;
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() {
return region_;
}
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() {
return region_;
@ -1166,19 +1166,19 @@ public final class MapReduceProtos {
return builder;
}
/**
* Protobuf type {@code TableSnapshotRegionSplit}
* Protobuf type {@code hbase.pb.TableSnapshotRegionSplit}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplitOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_TableSnapshotRegionSplit_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_TableSnapshotRegionSplit_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_TableSnapshotRegionSplit_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit.Builder.class);
}
@ -1228,7 +1228,7 @@ public final class MapReduceProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_TableSnapshotRegionSplit_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.TableSnapshotRegionSplit getDefaultInstanceForType() {
@ -1433,18 +1433,18 @@ public final class MapReduceProtos {
return this;
}
// optional .TableSchema table = 3;
// optional .hbase.pb.TableSchema table = 3;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema table_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> tableBuilder_;
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public boolean hasTable() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getTable() {
if (tableBuilder_ == null) {
@ -1454,7 +1454,7 @@ public final class MapReduceProtos {
}
}
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public Builder setTable(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
if (tableBuilder_ == null) {
@ -1470,7 +1470,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public Builder setTable(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
@ -1484,7 +1484,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public Builder mergeTable(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
if (tableBuilder_ == null) {
@ -1503,7 +1503,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public Builder clearTable() {
if (tableBuilder_ == null) {
@ -1516,7 +1516,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getTableBuilder() {
bitField0_ |= 0x00000002;
@ -1524,7 +1524,7 @@ public final class MapReduceProtos {
return getTableFieldBuilder().getBuilder();
}
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getTableOrBuilder() {
if (tableBuilder_ != null) {
@ -1534,7 +1534,7 @@ public final class MapReduceProtos {
}
}
/**
* <code>optional .TableSchema table = 3;</code>
* <code>optional .hbase.pb.TableSchema table = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
@ -1550,18 +1550,18 @@ public final class MapReduceProtos {
return tableBuilder_;
}
// optional .RegionInfo region = 4;
// optional .hbase.pb.RegionInfo region = 4;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_;
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() {
if (regionBuilder_ == null) {
@ -1571,7 +1571,7 @@ public final class MapReduceProtos {
}
}
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionBuilder_ == null) {
@ -1587,7 +1587,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public Builder setRegion(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
@ -1601,7 +1601,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionBuilder_ == null) {
@ -1620,7 +1620,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public Builder clearRegion() {
if (regionBuilder_ == null) {
@ -1633,7 +1633,7 @@ public final class MapReduceProtos {
return this;
}
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionBuilder() {
bitField0_ |= 0x00000004;
@ -1641,7 +1641,7 @@ public final class MapReduceProtos {
return getRegionFieldBuilder().getBuilder();
}
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
@ -1651,7 +1651,7 @@ public final class MapReduceProtos {
}
}
/**
* <code>optional .RegionInfo region = 4;</code>
* <code>optional .hbase.pb.RegionInfo region = 4;</code>
*/
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
@ -1667,7 +1667,7 @@ public final class MapReduceProtos {
return regionBuilder_;
}
// @@protoc_insertion_point(builder_scope:TableSnapshotRegionSplit)
// @@protoc_insertion_point(builder_scope:hbase.pb.TableSnapshotRegionSplit)
}
static {
@ -1675,19 +1675,19 @@ public final class MapReduceProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:TableSnapshotRegionSplit)
// @@protoc_insertion_point(class_scope:hbase.pb.TableSnapshotRegionSplit)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ScanMetrics_descriptor;
internal_static_hbase_pb_ScanMetrics_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ScanMetrics_fieldAccessorTable;
internal_static_hbase_pb_ScanMetrics_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_TableSnapshotRegionSplit_descriptor;
internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_TableSnapshotRegionSplit_fieldAccessorTable;
internal_static_hbase_pb_TableSnapshotRegionSplit_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -1697,30 +1697,31 @@ public final class MapReduceProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\017MapReduce.proto\032\013HBase.proto\".\n\013ScanMe" +
"trics\022\037\n\007metrics\030\001 \003(\0132\016.NameInt64Pair\"g" +
"\n\030TableSnapshotRegionSplit\022\021\n\tlocations\030" +
"\002 \003(\t\022\033\n\005table\030\003 \001(\0132\014.TableSchema\022\033\n\006re" +
"gion\030\004 \001(\0132\013.RegionInfoBB\n*org.apache.ha" +
"doop.hbase.protobuf.generatedB\017MapReduce" +
"ProtosH\001\240\001\001"
"\n\017MapReduce.proto\022\010hbase.pb\032\013HBase.proto" +
"\"7\n\013ScanMetrics\022(\n\007metrics\030\001 \003(\0132\027.hbase" +
".pb.NameInt64Pair\"y\n\030TableSnapshotRegion" +
"Split\022\021\n\tlocations\030\002 \003(\t\022$\n\005table\030\003 \001(\0132" +
"\025.hbase.pb.TableSchema\022$\n\006region\030\004 \001(\0132\024" +
".hbase.pb.RegionInfoBB\n*org.apache.hadoo" +
"p.hbase.protobuf.generatedB\017MapReducePro" +
"tosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_ScanMetrics_descriptor =
internal_static_hbase_pb_ScanMetrics_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_ScanMetrics_fieldAccessorTable = new
internal_static_hbase_pb_ScanMetrics_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ScanMetrics_descriptor,
internal_static_hbase_pb_ScanMetrics_descriptor,
new java.lang.String[] { "Metrics", });
internal_static_TableSnapshotRegionSplit_descriptor =
internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_TableSnapshotRegionSplit_fieldAccessorTable = new
internal_static_hbase_pb_TableSnapshotRegionSplit_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_TableSnapshotRegionSplit_descriptor,
internal_static_hbase_pb_TableSnapshotRegionSplit_descriptor,
new java.lang.String[] { "Locations", "Table", "Region", });
return null;
}
@ -12,7 +12,7 @@ public final class MultiRowMutationProtos {
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MultiRowMutationProcessorRequest}
* Protobuf type {@code hbase.pb.MultiRowMutationProcessorRequest}
*/
public static final class MultiRowMutationProcessorRequest extends
com.google.protobuf.GeneratedMessage
@ -75,12 +75,12 @@ public final class MultiRowMutationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MultiRowMutationProcessorRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MultiRowMutationProcessorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorRequest.Builder.class);
}
@ -231,19 +231,19 @@ public final class MultiRowMutationProtos {
return builder;
}
/**
* Protobuf type {@code MultiRowMutationProcessorRequest}
* Protobuf type {@code hbase.pb.MultiRowMutationProcessorRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MultiRowMutationProcessorRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MultiRowMutationProcessorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorRequest.Builder.class);
}
@ -277,7 +277,7 @@ public final class MultiRowMutationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MultiRowMutationProcessorRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorRequest getDefaultInstanceForType() {
@ -335,7 +335,7 @@ public final class MultiRowMutationProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorRequest)
// @@protoc_insertion_point(builder_scope:hbase.pb.MultiRowMutationProcessorRequest)
}
static {
@ -343,14 +343,14 @@ public final class MultiRowMutationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:MultiRowMutationProcessorRequest)
// @@protoc_insertion_point(class_scope:hbase.pb.MultiRowMutationProcessorRequest)
}
public interface MultiRowMutationProcessorResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MultiRowMutationProcessorResponse}
* Protobuf type {@code hbase.pb.MultiRowMutationProcessorResponse}
*/
public static final class MultiRowMutationProcessorResponse extends
com.google.protobuf.GeneratedMessage
@ -413,12 +413,12 @@ public final class MultiRowMutationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MultiRowMutationProcessorResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MultiRowMutationProcessorResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorResponse.Builder.class);
}
@ -569,19 +569,19 @@ public final class MultiRowMutationProtos {
return builder;
}
/**
* Protobuf type {@code MultiRowMutationProcessorResponse}
* Protobuf type {@code hbase.pb.MultiRowMutationProcessorResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MultiRowMutationProcessorResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MultiRowMutationProcessorResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorResponse.Builder.class);
}
@ -615,7 +615,7 @@ public final class MultiRowMutationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MultiRowMutationProcessorResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationProcessorResponse getDefaultInstanceForType() {
@ -673,7 +673,7 @@ public final class MultiRowMutationProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorResponse)
// @@protoc_insertion_point(builder_scope:hbase.pb.MultiRowMutationProcessorResponse)
}
static {
@ -681,33 +681,33 @@ public final class MultiRowMutationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:MultiRowMutationProcessorResponse)
// @@protoc_insertion_point(class_scope:hbase.pb.MultiRowMutationProcessorResponse)
}
public interface MutateRowsRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .MutationProto mutation_request = 1;
// repeated .hbase.pb.MutationProto mutation_request = 1;
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto>
getMutationRequestList();
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index);
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
int getMutationRequestCount();
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationRequestOrBuilderList();
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder(
int index);
@ -733,7 +733,7 @@ public final class MultiRowMutationProtos {
long getNonce();
}
/**
* Protobuf type {@code MutateRowsRequest}
* Protobuf type {@code hbase.pb.MutateRowsRequest}
*/
public static final class MutateRowsRequest extends
com.google.protobuf.GeneratedMessage
@ -818,12 +818,12 @@ public final class MultiRowMutationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MutateRowsRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MutateRowsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.Builder.class);
}
@ -844,36 +844,36 @@ public final class MultiRowMutationProtos {
}
private int bitField0_;
// repeated .MutationProto mutation_request = 1;
// repeated .hbase.pb.MutationProto mutation_request = 1;
public static final int MUTATION_REQUEST_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> mutationRequest_;
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> getMutationRequestList() {
return mutationRequest_;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationRequestOrBuilderList() {
return mutationRequest_;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public int getMutationRequestCount() {
return mutationRequest_.size();
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) {
return mutationRequest_.get(index);
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder(
int index) {
@ -1097,19 +1097,19 @@ public final class MultiRowMutationProtos {
return builder;
}
/**
* Protobuf type {@code MutateRowsRequest}
* Protobuf type {@code hbase.pb.MutateRowsRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MutateRowsRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MutateRowsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest.Builder.class);
}
@ -1154,7 +1154,7 @@ public final class MultiRowMutationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MutateRowsRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest getDefaultInstanceForType() {
@ -1271,7 +1271,7 @@ public final class MultiRowMutationProtos {
}
private int bitField0_;
// repeated .MutationProto mutation_request = 1;
// repeated .hbase.pb.MutationProto mutation_request = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> mutationRequest_ =
java.util.Collections.emptyList();
private void ensureMutationRequestIsMutable() {
@ -1285,7 +1285,7 @@ public final class MultiRowMutationProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationRequestBuilder_;
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> getMutationRequestList() {
if (mutationRequestBuilder_ == null) {
@ -1295,7 +1295,7 @@ public final class MultiRowMutationProtos {
}
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public int getMutationRequestCount() {
if (mutationRequestBuilder_ == null) {
@ -1305,7 +1305,7 @@ public final class MultiRowMutationProtos {
}
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) {
if (mutationRequestBuilder_ == null) {
@ -1315,7 +1315,7 @@ public final class MultiRowMutationProtos {
}
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public Builder setMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
@ -1332,7 +1332,7 @@ public final class MultiRowMutationProtos {
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public Builder setMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
@ -1346,7 +1346,7 @@ public final class MultiRowMutationProtos {
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public Builder addMutationRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationRequestBuilder_ == null) {
@ -1362,7 +1362,7 @@ public final class MultiRowMutationProtos {
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public Builder addMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
@ -1379,7 +1379,7 @@ public final class MultiRowMutationProtos {
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public Builder addMutationRequest(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
@ -1393,7 +1393,7 @@ public final class MultiRowMutationProtos {
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public Builder addMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
@ -1407,7 +1407,7 @@ public final class MultiRowMutationProtos {
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public Builder addAllMutationRequest(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> values) {
@ -1421,7 +1421,7 @@ public final class MultiRowMutationProtos {
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public Builder clearMutationRequest() {
if (mutationRequestBuilder_ == null) {
@ -1434,7 +1434,7 @@ public final class MultiRowMutationProtos {
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public Builder removeMutationRequest(int index) {
if (mutationRequestBuilder_ == null) {
@ -1447,14 +1447,14 @@ public final class MultiRowMutationProtos {
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationRequestBuilder(
int index) {
return getMutationRequestFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder(
int index) {
@ -1464,7 +1464,7 @@ public final class MultiRowMutationProtos {
}
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationRequestOrBuilderList() {
@ -1475,14 +1475,14 @@ public final class MultiRowMutationProtos {
}
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder() {
return getMutationRequestFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance());
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder(
int index) {
@ -1490,7 +1490,7 @@ public final class MultiRowMutationProtos {
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance());
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
* <code>repeated .hbase.pb.MutationProto mutation_request = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder>
getMutationRequestBuilderList() {
@ -1577,7 +1577,7 @@ public final class MultiRowMutationProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:MutateRowsRequest)
// @@protoc_insertion_point(builder_scope:hbase.pb.MutateRowsRequest)
}
static {
@ -1585,14 +1585,14 @@ public final class MultiRowMutationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:MutateRowsRequest)
// @@protoc_insertion_point(class_scope:hbase.pb.MutateRowsRequest)
}
public interface MutateRowsResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MutateRowsResponse}
* Protobuf type {@code hbase.pb.MutateRowsResponse}
*/
public static final class MutateRowsResponse extends
com.google.protobuf.GeneratedMessage
@ -1655,12 +1655,12 @@ public final class MultiRowMutationProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MutateRowsResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MutateRowsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.Builder.class);
}
@ -1811,19 +1811,19 @@ public final class MultiRowMutationProtos {
return builder;
}
/**
* Protobuf type {@code MutateRowsResponse}
* Protobuf type {@code hbase.pb.MutateRowsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MutateRowsResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MutateRowsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse.Builder.class);
}
@ -1857,7 +1857,7 @@ public final class MultiRowMutationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_MutateRowsResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.internal_static_hbase_pb_MutateRowsResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsResponse getDefaultInstanceForType() {
@ -1915,7 +1915,7 @@ public final class MultiRowMutationProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:MutateRowsResponse)
// @@protoc_insertion_point(builder_scope:hbase.pb.MutateRowsResponse)
}
static {
@ -1923,11 +1923,11 @@ public final class MultiRowMutationProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:MutateRowsResponse)
// @@protoc_insertion_point(class_scope:hbase.pb.MutateRowsResponse)
}
/**
* Protobuf service {@code MultiRowMutationService}
* Protobuf service {@code hbase.pb.MultiRowMutationService}
*/
public static abstract class MultiRowMutationService
implements com.google.protobuf.Service {
@ -1935,7 +1935,7 @@ public final class MultiRowMutationProtos {
public interface Interface {
/**
* <code>rpc MutateRows(.MutateRowsRequest) returns (.MutateRowsResponse);</code>
* <code>rpc MutateRows(.hbase.pb.MutateRowsRequest) returns (.hbase.pb.MutateRowsResponse);</code>
*/
public abstract void mutateRows(
com.google.protobuf.RpcController controller,
@ -2020,7 +2020,7 @@ public final class MultiRowMutationProtos {
}
/**
* <code>rpc MutateRows(.MutateRowsRequest) returns (.MutateRowsResponse);</code>
* <code>rpc MutateRows(.hbase.pb.MutateRowsRequest) returns (.hbase.pb.MutateRowsResponse);</code>
*/
public abstract void mutateRows(
com.google.protobuf.RpcController controller,
@ -2155,29 +2155,29 @@ public final class MultiRowMutationProtos {
}
// @@protoc_insertion_point(class_scope:MultiRowMutationService)
// @@protoc_insertion_point(class_scope:hbase.pb.MultiRowMutationService)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_MultiRowMutationProcessorRequest_descriptor;
internal_static_hbase_pb_MultiRowMutationProcessorRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable;
internal_static_hbase_pb_MultiRowMutationProcessorRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_MultiRowMutationProcessorResponse_descriptor;
internal_static_hbase_pb_MultiRowMutationProcessorResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;
internal_static_hbase_pb_MultiRowMutationProcessorResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_MutateRowsRequest_descriptor;
internal_static_hbase_pb_MutateRowsRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_MutateRowsRequest_fieldAccessorTable;
internal_static_hbase_pb_MutateRowsRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_MutateRowsResponse_descriptor;
internal_static_hbase_pb_MutateRowsResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_MutateRowsResponse_fieldAccessorTable;
internal_static_hbase_pb_MutateRowsResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -2187,45 +2187,46 @@ public final class MultiRowMutationProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\026MultiRowMutation.proto\032\014Client.proto\"\"" +
"\n MultiRowMutationProcessorRequest\"#\n!Mu" +
"ltiRowMutationProcessorResponse\"a\n\021Mutat" +
"eRowsRequest\022(\n\020mutation_request\030\001 \003(\0132\016" +
".MutationProto\022\023\n\013nonce_group\030\002 \001(\004\022\r\n\005n" +
"once\030\003 \001(\004\"\024\n\022MutateRowsResponse2P\n\027Mult" +
"iRowMutationService\0225\n\nMutateRows\022\022.Muta" +
"teRowsRequest\032\023.MutateRowsResponseBL\n*or" +
"g.apache.hadoop.hbase.protobuf.generated" +
"B\026MultiRowMutationProtosH\001\210\001\001\240\001\001"
"\n\026MultiRowMutation.proto\022\010hbase.pb\032\014Clie" +
"nt.proto\"\"\n MultiRowMutationProcessorReq" +
"uest\"#\n!MultiRowMutationProcessorRespons" +
"e\"j\n\021MutateRowsRequest\0221\n\020mutation_reque" +
"st\030\001 \003(\0132\027.hbase.pb.MutationProto\022\023\n\013non" +
"ce_group\030\002 \001(\004\022\r\n\005nonce\030\003 \001(\004\"\024\n\022MutateR" +
"owsResponse2b\n\027MultiRowMutationService\022G" +
"\n\nMutateRows\022\033.hbase.pb.MutateRowsReques" +
"t\032\034.hbase.pb.MutateRowsResponseBL\n*org.a" +
"pache.hadoop.hbase.protobuf.generatedB\026M",
"ultiRowMutationProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_MultiRowMutationProcessorRequest_descriptor =
internal_static_hbase_pb_MultiRowMutationProcessorRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable = new
internal_static_hbase_pb_MultiRowMutationProcessorRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiRowMutationProcessorRequest_descriptor,
internal_static_hbase_pb_MultiRowMutationProcessorRequest_descriptor,
new java.lang.String[] { });
internal_static_MultiRowMutationProcessorResponse_descriptor =
internal_static_hbase_pb_MultiRowMutationProcessorResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable = new
internal_static_hbase_pb_MultiRowMutationProcessorResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiRowMutationProcessorResponse_descriptor,
internal_static_hbase_pb_MultiRowMutationProcessorResponse_descriptor,
new java.lang.String[] { });
internal_static_MutateRowsRequest_descriptor =
internal_static_hbase_pb_MutateRowsRequest_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_MutateRowsRequest_fieldAccessorTable = new
internal_static_hbase_pb_MutateRowsRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MutateRowsRequest_descriptor,
internal_static_hbase_pb_MutateRowsRequest_descriptor,
new java.lang.String[] { "MutationRequest", "NonceGroup", "Nonce", });
internal_static_MutateRowsResponse_descriptor =
internal_static_hbase_pb_MutateRowsResponse_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_MutateRowsResponse_fieldAccessorTable = new
internal_static_hbase_pb_MutateRowsResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MutateRowsResponse_descriptor,
internal_static_hbase_pb_MutateRowsResponse_descriptor,
new java.lang.String[] { });
return null;
}
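
Note that only the embedded file descriptor and the internal_static_* identifiers change in this regenerated class; the Java package and outer class name are still controlled by java_package / java_outer_classname, so existing callers compile unchanged. Below is a minimal sketch (not part of this commit; table setup, real mutation construction, and error handling are elided, and the row key is a placeholder) of the usual coprocessor-endpoint invocation against the regenerated MultiRowMutationService:

import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MultiRowMutationService;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProtos.MutateRowsRequest;

public final class MutateRowsExample {
  // 'table' is assumed to be an open Table whose target rows live in one region.
  static void mutateRowsAtomically(Table table, byte[] row) throws ServiceException {
    MutateRowsRequest request = MutateRowsRequest.newBuilder()
        // A real caller adds MutationProtos built from Puts/Deletes here;
        // the default instance is only a stand-in for illustration.
        .addMutationRequest(MutationProto.getDefaultInstance())
        .build();
    CoprocessorRpcChannel channel = table.coprocessorService(row);
    MultiRowMutationService.BlockingInterface stub =
        MultiRowMutationService.newBlockingStub(channel);
    stub.mutateRows(null, request);
  }
}

The protobuf-level names of the request, response, and service now carry the hbase.pb prefix (as reflected in the descriptor string above), but the generated Java classes and methods keep their previous names.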

View File

@ -72,7 +72,7 @@ public final class RowProcessorProtos {
long getNonce();
}
/**
* Protobuf type {@code ProcessRequest}
* Protobuf type {@code hbase.pb.ProcessRequest}
*/
public static final class ProcessRequest extends
com.google.protobuf.GeneratedMessage
@ -161,12 +161,12 @@ public final class RowProcessorProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_hbase_pb_ProcessRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_hbase_pb_ProcessRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.Builder.class);
}
@ -541,19 +541,19 @@ public final class RowProcessorProtos {
return builder;
}
/**
* Protobuf type {@code ProcessRequest}
* Protobuf type {@code hbase.pb.ProcessRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_hbase_pb_ProcessRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessRequest_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_hbase_pb_ProcessRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest.Builder.class);
}
@ -597,7 +597,7 @@ public final class RowProcessorProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_hbase_pb_ProcessRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest getDefaultInstanceForType() {
@ -952,7 +952,7 @@ public final class RowProcessorProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:ProcessRequest)
// @@protoc_insertion_point(builder_scope:hbase.pb.ProcessRequest)
}
static {
@ -960,7 +960,7 @@ public final class RowProcessorProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:ProcessRequest)
// @@protoc_insertion_point(class_scope:hbase.pb.ProcessRequest)
}
public interface ProcessResponseOrBuilder
@ -977,7 +977,7 @@ public final class RowProcessorProtos {
com.google.protobuf.ByteString getRowProcessorResult();
}
/**
* Protobuf type {@code ProcessResponse}
* Protobuf type {@code hbase.pb.ProcessResponse}
*/
public static final class ProcessResponse extends
com.google.protobuf.GeneratedMessage
@ -1046,12 +1046,12 @@ public final class RowProcessorProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_hbase_pb_ProcessResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_hbase_pb_ProcessResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.Builder.class);
}
@ -1240,19 +1240,19 @@ public final class RowProcessorProtos {
return builder;
}
/**
* Protobuf type {@code ProcessResponse}
* Protobuf type {@code hbase.pb.ProcessResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_hbase_pb_ProcessResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessResponse_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_hbase_pb_ProcessResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.class, org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse.Builder.class);
}
@ -1288,7 +1288,7 @@ public final class RowProcessorProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_ProcessResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_hbase_pb_ProcessResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse getDefaultInstanceForType() {
@ -1397,7 +1397,7 @@ public final class RowProcessorProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:ProcessResponse)
// @@protoc_insertion_point(builder_scope:hbase.pb.ProcessResponse)
}
static {
@ -1405,11 +1405,11 @@ public final class RowProcessorProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:ProcessResponse)
// @@protoc_insertion_point(class_scope:hbase.pb.ProcessResponse)
}
/**
* Protobuf service {@code RowProcessorService}
* Protobuf service {@code hbase.pb.RowProcessorService}
*/
public static abstract class RowProcessorService
implements com.google.protobuf.Service {
@ -1417,7 +1417,7 @@ public final class RowProcessorProtos {
public interface Interface {
/**
* <code>rpc Process(.ProcessRequest) returns (.ProcessResponse);</code>
* <code>rpc Process(.hbase.pb.ProcessRequest) returns (.hbase.pb.ProcessResponse);</code>
*/
public abstract void process(
com.google.protobuf.RpcController controller,
@ -1502,7 +1502,7 @@ public final class RowProcessorProtos {
}
/**
* <code>rpc Process(.ProcessRequest) returns (.ProcessResponse);</code>
* <code>rpc Process(.hbase.pb.ProcessRequest) returns (.hbase.pb.ProcessResponse);</code>
*/
public abstract void process(
com.google.protobuf.RpcController controller,
@ -1637,19 +1637,19 @@ public final class RowProcessorProtos {
}
// @@protoc_insertion_point(class_scope:RowProcessorService)
// @@protoc_insertion_point(class_scope:hbase.pb.RowProcessorService)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ProcessRequest_descriptor;
internal_static_hbase_pb_ProcessRequest_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ProcessRequest_fieldAccessorTable;
internal_static_hbase_pb_ProcessRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ProcessResponse_descriptor;
internal_static_hbase_pb_ProcessResponse_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ProcessResponse_fieldAccessorTable;
internal_static_hbase_pb_ProcessResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -1659,33 +1659,34 @@ public final class RowProcessorProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\022RowProcessor.proto\"\261\001\n\016ProcessRequest\022" +
" \n\030row_processor_class_name\030\001 \002(\t\022.\n&row" +
"_processor_initializer_message_name\030\002 \001(" +
"\t\022)\n!row_processor_initializer_message\030\003" +
" \001(\014\022\023\n\013nonce_group\030\004 \001(\004\022\r\n\005nonce\030\005 \001(\004" +
"\"/\n\017ProcessResponse\022\034\n\024row_processor_res" +
"ult\030\001 \002(\0142C\n\023RowProcessorService\022,\n\007Proc" +
"ess\022\017.ProcessRequest\032\020.ProcessResponseBH" +
"\n*org.apache.hadoop.hbase.protobuf.gener" +
"atedB\022RowProcessorProtosH\001\210\001\001\240\001\001"
"\n\022RowProcessor.proto\022\010hbase.pb\"\261\001\n\016Proce" +
"ssRequest\022 \n\030row_processor_class_name\030\001 " +
"\002(\t\022.\n&row_processor_initializer_message" +
"_name\030\002 \001(\t\022)\n!row_processor_initializer" +
"_message\030\003 \001(\014\022\023\n\013nonce_group\030\004 \001(\004\022\r\n\005n" +
"once\030\005 \001(\004\"/\n\017ProcessResponse\022\034\n\024row_pro" +
"cessor_result\030\001 \002(\0142U\n\023RowProcessorServi" +
"ce\022>\n\007Process\022\030.hbase.pb.ProcessRequest\032" +
"\031.hbase.pb.ProcessResponseBH\n*org.apache" +
".hadoop.hbase.protobuf.generatedB\022RowPro",
"cessorProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_ProcessRequest_descriptor =
internal_static_hbase_pb_ProcessRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_ProcessRequest_fieldAccessorTable = new
internal_static_hbase_pb_ProcessRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ProcessRequest_descriptor,
internal_static_hbase_pb_ProcessRequest_descriptor,
new java.lang.String[] { "RowProcessorClassName", "RowProcessorInitializerMessageName", "RowProcessorInitializerMessage", "NonceGroup", "Nonce", });
internal_static_ProcessResponse_descriptor =
internal_static_hbase_pb_ProcessResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_ProcessResponse_fieldAccessorTable = new
internal_static_hbase_pb_ProcessResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ProcessResponse_descriptor,
internal_static_hbase_pb_ProcessResponse_descriptor,
new java.lang.String[] { "RowProcessorResult", });
return null;
}

View File

@ -32,7 +32,7 @@ public final class TracingProtos {
long getParentId();
}
/**
* Protobuf type {@code RPCTInfo}
* Protobuf type {@code hbase.pb.RPCTInfo}
*
* <pre>
*Used to pass through the information necessary to continue
@ -114,12 +114,12 @@ public final class TracingProtos {
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_RPCTInfo_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_RPCTInfo_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder.class);
}
@ -337,7 +337,7 @@ public final class TracingProtos {
return builder;
}
/**
* Protobuf type {@code RPCTInfo}
* Protobuf type {@code hbase.pb.RPCTInfo}
*
* <pre>
*Used to pass through the information necessary to continue
@ -352,12 +352,12 @@ public final class TracingProtos {
implements org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_RPCTInfo_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_RPCTInfo_fieldAccessorTable
return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo.Builder.class);
}
@ -395,7 +395,7 @@ public final class TracingProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_RPCTInfo_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.TracingProtos.internal_static_hbase_pb_RPCTInfo_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.TracingProtos.RPCTInfo getDefaultInstanceForType() {
@ -537,7 +537,7 @@ public final class TracingProtos {
return this;
}
// @@protoc_insertion_point(builder_scope:RPCTInfo)
// @@protoc_insertion_point(builder_scope:hbase.pb.RPCTInfo)
}
static {
@ -545,14 +545,14 @@ public final class TracingProtos {
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:RPCTInfo)
// @@protoc_insertion_point(class_scope:hbase.pb.RPCTInfo)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_RPCTInfo_descriptor;
internal_static_hbase_pb_RPCTInfo_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_RPCTInfo_fieldAccessorTable;
internal_static_hbase_pb_RPCTInfo_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@ -562,21 +562,21 @@ public final class TracingProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\rTracing.proto\"/\n\010RPCTInfo\022\020\n\010trace_id\030" +
"\001 \001(\003\022\021\n\tparent_id\030\002 \001(\003B@\n*org.apache.h" +
"adoop.hbase.protobuf.generatedB\rTracingP" +
"rotosH\001\240\001\001"
"\n\rTracing.proto\022\010hbase.pb\"/\n\010RPCTInfo\022\020\n" +
"\010trace_id\030\001 \001(\003\022\021\n\tparent_id\030\002 \001(\003B@\n*or" +
"g.apache.hadoop.hbase.protobuf.generated" +
"B\rTracingProtosH\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
internal_static_RPCTInfo_descriptor =
internal_static_hbase_pb_RPCTInfo_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_RPCTInfo_fieldAccessorTable = new
internal_static_hbase_pb_RPCTInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_RPCTInfo_descriptor,
internal_static_hbase_pb_RPCTInfo_descriptor,
new java.lang.String[] { "TraceId", "ParentId", });
return null;
}
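
For Tracing.proto the only substantive change to the embedded descriptor is the insertion of "\022\010hbase.pb" right after the file name: 0x12 (octal 022) is the tag for field 2 of FileDescriptorProto (the package), \010 is the length 8, and the remaining eight bytes are the string "hbase.pb"; the string literals around it are merely re-wrapped. A hypothetical check, not part of this commit, that reads the package back out of the regenerated class:

import com.google.protobuf.Descriptors.FileDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.TracingProtos;

public final class DescriptorPackageCheck {
  public static void main(String[] args) {
    FileDescriptor fd = TracingProtos.getDescriptor();
    // The file-level package is now populated...
    System.out.println(fd.getPackage());                            // hbase.pb
    // ...so every contained type is addressed under it.
    System.out.println(fd.getMessageTypes().get(0).getFullName());  // hbase.pb.RPCTInfo
  }
}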

View File

@ -15,6 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "AccessControlProtos";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers that are used for Admin service.
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "AdminProtos";

View File

@ -15,6 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "AggregateProtos";

View File

@ -15,6 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "AuthenticationProtos";

View File

@ -17,6 +17,7 @@
*/
// Cell and KeyValue protos
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "CellProtos";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers that are used for Client service.
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "ClientProtos";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers that are shared throughout HBase
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "ClusterIdProtos";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers that are used for ClustStatus
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "ClusterStatusProtos";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers that are used for filters
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "ComparatorProtos";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers used for encryption
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "EncryptionProtos";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers that are used for error handling
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "ErrorHandlingProtos";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers that are written into the filesystem
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "FSProtos";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers that are used for filters
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "FilterProtos";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers that are shared throughout HBase
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "HBaseProtos";

View File

@ -15,6 +15,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "HFileProtos";
option java_generic_services = true;

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers to represent the state of the load balancer.
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "LoadBalancerProtos";

View File

@ -17,6 +17,7 @@
*/
//This file includes protocol buffers used in MapReduce only.
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "MapReduceProtos";

View File

@ -18,6 +18,7 @@
// All to do with the Master. Includes schema management since these
// changes are run by the Master process.
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "MasterProtos";

View File

@ -15,6 +15,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "MasterProcedureProtos";
option java_generic_services = true;

View File

@ -15,6 +15,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
import "Client.proto";
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "MultiRowMutationProtos";

View File

@ -15,6 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "ProcedureProtos";

View File

@ -15,6 +15,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "QuotaProtos";
option java_generic_services = true;

View File

@ -15,6 +15,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
import "Tracing.proto";
import "HBase.proto";

View File

@ -17,6 +17,7 @@
*/
// This file contains protocol buffers that are used for RegionServerStatusProtocol.
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "RegionServerStatusProtos";

View File

@ -20,6 +20,8 @@
* See BaseRowProcessorEndpoint for the implementation.
* See HRegion#processRowsWithLocks() for details.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "RowProcessorProtos";
option java_generic_services = true;

View File

@ -15,6 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "SecureBulkLoadProtos";

View File

@ -15,6 +15,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "SnapshotProtos";
option java_generic_services = true;

View File

@ -15,6 +15,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "TracingProtos";
option java_generate_equals_and_hash = true;

View File

@ -15,6 +15,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "VisibilityLabelsProtos";

View File

@ -15,6 +15,8 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "WALProtos";
option java_generic_services = false;

View File

@ -18,6 +18,7 @@
// ZNode data in hbase are serialized protobufs with a four byte
// 'magic' 'PBUF' prefix.
package hbase.pb;
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "ZooKeeperProtos";
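
The ZooKeeper.proto comment above notes that znode payloads in HBase are serialized protobufs behind a four-byte 'PBUF' magic prefix. A small sketch assuming only that convention (HBase's ProtobufUtil provides helpers along these lines; the class and method names below are illustrative, not part of this commit):

import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public final class ZNodePbufCheck {
  private static final byte[] PB_MAGIC = "PBUF".getBytes(StandardCharsets.UTF_8);

  /** Returns the protobuf payload of znode data, or null if the magic prefix is missing. */
  static byte[] stripMagic(byte[] znodeData) {
    if (znodeData == null || znodeData.length < PB_MAGIC.length) {
      return null;
    }
    for (int i = 0; i < PB_MAGIC.length; i++) {
      if (znodeData[i] != PB_MAGIC[i]) {
        return null;
      }
    }
    return Arrays.copyOfRange(znodeData, PB_MAGIC.length, znodeData.length);
  }

  public static void main(String[] args) {
    byte[] znodeData = "PBUFexample-payload".getBytes(StandardCharsets.UTF_8);
    // Prints the payload with the magic prefix stripped: example-payload
    System.out.println(new String(stripMagic(znodeData), StandardCharsets.UTF_8));
  }
}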