HBASE-8165 Update our protobuf to 2.5 from 2.4.1; REVERT

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1466759 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2013-04-11 03:51:47 +00:00
parent 6d32cc0881
commit d1b3505b26
49 changed files with 40740 additions and 80091 deletions
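
The change this revert undoes is the same in every file below: protobuf 2.5 generated code exposes a static PARSER field on each message and routes all parseFrom() overloads through it, while the restored 2.4.1 code parses by merging bytes into a fresh Builder. A minimal sketch of the two idioms against the ClusterId message from this commit (each line compiles only against its own protobuf version, so this is illustration rather than buildable code):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId;

    static ClusterId parse(byte[] data) throws InvalidProtocolBufferException {
      // protobuf 2.5 style (removed here): delegate to the static PARSER.
      ClusterId viaParser = ClusterId.PARSER.parseFrom(data);
      // protobuf 2.4.1 style (restored here): merge into a Builder, then build().
      // build() enforces required fields such as clusterId; the generated
      // parseFrom() methods call buildParsed() to convert that failure into
      // an InvalidProtocolBufferException.
      ClusterId viaBuilder = ClusterId.newBuilder().mergeFrom(data).build();
      return viaBuilder;
    }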

View File

@@ -326,7 +326,7 @@ public class ServerName implements Comparable<ServerName> {
int prefixLen = ProtobufUtil.lengthOfPBMagic();
try {
RootRegionServer rss =
RootRegionServer.PARSER.parseFrom(data, prefixLen, data.length - prefixLen);
RootRegionServer.newBuilder().mergeFrom(data, prefixLen, data.length - prefixLen).build();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName sn = rss.getServer();
return new ServerName(sn.getHostName(), sn.getPort(), sn.getStartCode());
} catch (InvalidProtocolBufferException e) {

View File

@ -41,7 +41,6 @@ import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import com.google.protobuf.RpcChannel;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
@@ -1835,21 +1834,19 @@ public final class ProtobufUtil {
}
public static ScanMetrics toScanMetrics(final byte[] bytes) {
Parser<MapReduceProtos.ScanMetrics> parser = MapReduceProtos.ScanMetrics.PARSER;
MapReduceProtos.ScanMetrics pScanMetrics = null;
MapReduceProtos.ScanMetrics.Builder builder = MapReduceProtos.ScanMetrics.newBuilder();
try {
pScanMetrics = parser.parseFrom(bytes);
builder.mergeFrom(bytes);
} catch (InvalidProtocolBufferException e) {
// Ignored: there are just no key values to add.
}
MapReduceProtos.ScanMetrics pScanMetrics = builder.build();
ScanMetrics scanMetrics = new ScanMetrics();
if (pScanMetrics != null) {
for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
if (pair.hasName() && pair.hasValue()) {
scanMetrics.setCounter(pair.getName(), pair.getValue());
}
}
}
return scanMetrics;
}
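
One behavioral nuance in the reverted toScanMetrics() above: the 2.5 version left pScanMetrics null on a parse failure and skipped the copy loop entirely, while the 2.4.1 version builds whatever fields merged before the failure, so partially readable bytes can still contribute counters. Either way the exception is swallowed and the caller always gets a usable ScanMetrics; a sketch with deliberately invalid input:

    // 0xFF is a truncated varint tag, so mergeFrom() throws inside
    // toScanMetrics(); the catch block drops the error and the caller
    // receives a ScanMetrics with no counters rather than an exception.
    ScanMetrics metrics = ProtobufUtil.toScanMetrics(new byte[] { (byte) 0xFF });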

View File

@@ -819,34 +819,24 @@ public final class RequestConverter {
return builder.build();
}
/**
* @see {@link #buildRollWALWriterRequest()}
*/
private static RollWALWriterRequest ROLL_WAL_WRITER_REQUEST =
RollWALWriterRequest.newBuilder().build();
/**
* Create a new RollWALWriterRequest
*
* @return a RollWALWriterRequest
*/
public static RollWALWriterRequest buildRollWALWriterRequest() {
return ROLL_WAL_WRITER_REQUEST;
RollWALWriterRequest.Builder builder = RollWALWriterRequest.newBuilder();
return builder.build();
}
/**
* @see {@link #buildGetServerInfoRequest()}
*/
private static GetServerInfoRequest GET_SERVER_INFO_REQUEST =
GetServerInfoRequest.newBuilder().build();
/**
* Create a new GetServerInfoRequest
*
* @return a GetServerInfoRequest
*/
public static GetServerInfoRequest buildGetServerInfoRequest() {
return GET_SERVER_INFO_REQUEST;
GetServerInfoRequest.Builder builder = GetServerInfoRequest.newBuilder();
return builder.build();
}
/**
@@ -1149,33 +1139,21 @@ public final class RequestConverter {
return SetBalancerRunningRequest.newBuilder().setOn(on).setSynchronous(synchronous).build();
}
/**
* @see {@link #buildGetClusterStatusRequest}
*/
private static final GetClusterStatusRequest GET_CLUSTER_STATUS_REQUEST =
GetClusterStatusRequest.newBuilder().build();
/**
* Creates a protocol buffer GetClusterStatusRequest
*
* @return A GetClusterStatusRequest
*/
public static GetClusterStatusRequest buildGetClusterStatusRequest() {
return GET_CLUSTER_STATUS_REQUEST;
return GetClusterStatusRequest.newBuilder().build();
}
/**
* @see {@link #buildCatalogScanRequest}
*/
private static final CatalogScanRequest CATALOG_SCAN_REQUEST =
CatalogScanRequest.newBuilder().build();
/**
* Creates a request for running a catalog scan
* @return A {@link CatalogScanRequest}
*/
public static CatalogScanRequest buildCatalogScanRequest() {
return CATALOG_SCAN_REQUEST;
return CatalogScanRequest.newBuilder().build();
}
/**
@@ -1186,18 +1164,12 @@ public final class RequestConverter {
return EnableCatalogJanitorRequest.newBuilder().setEnable(enable).build();
}
/**
* @see {@link #buildIsCatalogJanitorEnabledRequest()}
*/
private static final IsCatalogJanitorEnabledRequest IS_CATALOG_JANITOR_ENABLED_REQUEST =
IsCatalogJanitorEnabledRequest.newBuilder().build();
/**
* Creates a request for querying the master whether the catalog janitor is enabled
* @return A {@link IsCatalogJanitorEnabledRequest}
*/
public static IsCatalogJanitorEnabledRequest buildIsCatalogJanitorEnabledRequest() {
return IS_CATALOG_JANITOR_ENABLED_REQUEST;
return IsCatalogJanitorEnabledRequest.newBuilder().build();
}
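// The blocks removed above share one pattern: protobuf messages are
// immutable, so a parameter-less request can be built once and shared,
// saving a builder allocation on every call; the reverted code rebuilds
// the request each time instead. The cached-singleton form, sketched for
// one request type:
private static final GetServerInfoRequest GET_SERVER_INFO_REQUEST =
    GetServerInfoRequest.newBuilder().build();
public static GetServerInfoRequest buildGetServerInfoRequest() {
  return GET_SERVER_INFO_REQUEST; // safe to share: messages are immutable
}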
/**

View File

@@ -12,50 +12,17 @@ public final class ClusterIdProtos {
extends com.google.protobuf.MessageOrBuilder {
// required string clusterId = 1;
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
boolean hasClusterId();
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
java.lang.String getClusterId();
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
com.google.protobuf.ByteString
getClusterIdBytes();
String getClusterId();
}
/**
* Protobuf type {@code ClusterId}
*
* <pre>
**
* Content of the '/hbase/hbaseid', cluster id, znode.
* Also the content of the ${HBASE_ROOTDIR}/hbase.id file.
* </pre>
*/
public static final class ClusterId extends
com.google.protobuf.GeneratedMessage
implements ClusterIdOrBuilder {
// Use ClusterId.newBuilder() to construct.
private ClusterId(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private ClusterId(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ClusterId(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private ClusterId(boolean noInit) {}
private static final ClusterId defaultInstance;
public static ClusterId getDefaultInstance() {
@@ -66,52 +33,6 @@ public final class ClusterIdProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ClusterId(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
clusterId_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
@@ -119,75 +40,35 @@ public final class ClusterIdProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
}
public static com.google.protobuf.Parser<ClusterId> PARSER =
new com.google.protobuf.AbstractParser<ClusterId>() {
public ClusterId parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ClusterId(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ClusterId> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable;
}
private int bitField0_;
// required string clusterId = 1;
public static final int CLUSTERID_FIELD_NUMBER = 1;
private java.lang.Object clusterId_;
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public boolean hasClusterId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public java.lang.String getClusterId() {
public String getClusterId() {
java.lang.Object ref = clusterId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
if (ref instanceof String) {
return (String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
String s = bs.toStringUtf8();
if (com.google.protobuf.Internal.isValidUtf8(bs)) {
clusterId_ = s;
}
return s;
}
}
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public com.google.protobuf.ByteString
getClusterIdBytes() {
private com.google.protobuf.ByteString getClusterIdBytes() {
java.lang.Object ref = clusterId_;
if (ref instanceof java.lang.String) {
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
com.google.protobuf.ByteString.copyFromUtf8((String) ref);
clusterId_ = b;
return b;
} else {
@@ -263,12 +144,8 @@ public final class ClusterIdProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasClusterId()) {
@@ -276,61 +153,74 @@ public final class ClusterIdProtos {
hash = (53 * hash) + getClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@@ -346,15 +236,6 @@ public final class ClusterIdProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code ClusterId}
*
* <pre>
**
* Content of the '/hbase/hbaseid', cluster id, znode.
* Also the content of the ${HBASE_ROOTDIR}/hbase.id file.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder {
@@ -365,9 +246,7 @@ public final class ClusterIdProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder()
@@ -375,8 +254,7 @@ public final class ClusterIdProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -401,7 +279,7 @@ public final class ClusterIdProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstanceForType() {
@@ -416,6 +294,16 @@ public final class ClusterIdProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = new org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId(this);
int from_bitField0_ = bitField0_;
@@ -441,9 +329,7 @@ public final class ClusterIdProtos {
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) return this;
if (other.hasClusterId()) {
bitField0_ |= 0x00000001;
clusterId_ = other.clusterId_;
onChanged();
setClusterId(other.getClusterId());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
@@ -461,80 +347,52 @@ public final class ClusterIdProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
clusterId_ = input.readBytes();
break;
}
}
}
}
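// Why these loops switch on case 10, case 8, and case 16: a wire-format
// tag is (fieldNumber << 3) | wireType. A quick check for the fields this
// commit touches (illustrative arithmetic, not part of the generated file):
int clusterIdTag  = (1 << 3) | 2; // ClusterId.clusterId: length-delimited field 1 -> 10
int versionTag    = (1 << 3) | 2; // HBaseVersionFileContent.version: field 1      -> 10
int balancerOnTag = (1 << 3) | 0; // LoadBalancerState.balancerOn: varint field 1  -> 8
int rangeTag      = (2 << 3) | 0; // Reference.range: varint enum field 2          -> 16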
private int bitField0_;
// required string clusterId = 1;
private java.lang.Object clusterId_ = "";
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public boolean hasClusterId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public java.lang.String getClusterId() {
public String getClusterId() {
java.lang.Object ref = clusterId_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
if (!(ref instanceof String)) {
String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
clusterId_ = s;
return s;
} else {
return (java.lang.String) ref;
return (String) ref;
}
}
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public com.google.protobuf.ByteString
getClusterIdBytes() {
java.lang.Object ref = clusterId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clusterId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public Builder setClusterId(
java.lang.String value) {
public Builder setClusterId(String value) {
if (value == null) {
throw new NullPointerException();
}
@@ -543,35 +401,16 @@ public final class ClusterIdProtos {
onChanged();
return this;
}
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public Builder clearClusterId() {
bitField0_ = (bitField0_ & ~0x00000001);
clusterId_ = getDefaultInstance().getClusterId();
onChanged();
return this;
}
/**
* <code>required string clusterId = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public Builder setClusterIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
void setClusterId(com.google.protobuf.ByteString value) {
bitField0_ |= 0x00000001;
clusterId_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:ClusterId)
@@ -613,7 +452,9 @@ public final class ClusterIdProtos {
internal_static_ClusterId_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ClusterId_descriptor,
new java.lang.String[] { "ClusterId", });
new java.lang.String[] { "ClusterId", },
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class,
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
return null;
}
};
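
The parseDelimitedFrom() rewrites keep the same contract in both generated styles: a clean end-of-stream before the length prefix yields null (2.4.1 signals it via mergeDelimitedFrom() returning false; 2.5's PARSER returns null directly). Callers reading a stream of length-prefixed messages can therefore loop until null either way; a sketch, where `in` is an assumed java.io.InputStream holding zero or more records written with writeDelimitedTo():

    ClusterId next;
    while ((next = ClusterId.parseDelimitedFrom(in)) != null) {
      System.out.println(next.getClusterId());
    }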

View File

@@ -12,37 +12,17 @@ public final class FSProtos {
extends com.google.protobuf.MessageOrBuilder {
// required string version = 1;
/**
* <code>required string version = 1;</code>
*/
boolean hasVersion();
/**
* <code>required string version = 1;</code>
*/
java.lang.String getVersion();
/**
* <code>required string version = 1;</code>
*/
com.google.protobuf.ByteString
getVersionBytes();
String getVersion();
}
/**
* Protobuf type {@code HBaseVersionFileContent}
*
* <pre>
**
* The ${HBASE_ROOTDIR}/hbase.version file content
* </pre>
*/
public static final class HBaseVersionFileContent extends
com.google.protobuf.GeneratedMessage
implements HBaseVersionFileContentOrBuilder {
// Use HBaseVersionFileContent.newBuilder() to construct.
private HBaseVersionFileContent(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private HBaseVersionFileContent(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private HBaseVersionFileContent(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private HBaseVersionFileContent(boolean noInit) {}
private static final HBaseVersionFileContent defaultInstance;
public static HBaseVersionFileContent getDefaultInstance() {
@@ -53,52 +33,6 @@ public final class FSProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private HBaseVersionFileContent(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
version_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor;
@@ -106,63 +40,35 @@ public final class FSProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
}
public static com.google.protobuf.Parser<HBaseVersionFileContent> PARSER =
new com.google.protobuf.AbstractParser<HBaseVersionFileContent>() {
public HBaseVersionFileContent parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new HBaseVersionFileContent(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<HBaseVersionFileContent> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable;
}
private int bitField0_;
// required string version = 1;
public static final int VERSION_FIELD_NUMBER = 1;
private java.lang.Object version_;
/**
* <code>required string version = 1;</code>
*/
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string version = 1;</code>
*/
public java.lang.String getVersion() {
public String getVersion() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
if (ref instanceof String) {
return (String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
String s = bs.toStringUtf8();
if (com.google.protobuf.Internal.isValidUtf8(bs)) {
version_ = s;
}
return s;
}
}
/**
* <code>required string version = 1;</code>
*/
public com.google.protobuf.ByteString
getVersionBytes() {
private com.google.protobuf.ByteString getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
com.google.protobuf.ByteString.copyFromUtf8((String) ref);
version_ = b;
return b;
} else {
@@ -238,12 +144,8 @@ public final class FSProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasVersion()) {
@@ -251,61 +153,74 @@ public final class FSProtos {
hash = (53 * hash) + getVersion().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@@ -321,14 +236,6 @@ public final class FSProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code HBaseVersionFileContent}
*
* <pre>
**
* The ${HBASE_ROOTDIR}/hbase.version file content
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder {
@@ -339,9 +246,7 @@ public final class FSProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder()
@@ -349,8 +254,7 @@ public final class FSProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -375,7 +279,7 @@ public final class FSProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getDefaultInstanceForType() {
@@ -390,6 +294,16 @@ public final class FSProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent(this);
int from_bitField0_ = bitField0_;
@@ -415,9 +329,7 @@ public final class FSProtos {
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance()) return this;
if (other.hasVersion()) {
bitField0_ |= 0x00000001;
version_ = other.version_;
onChanged();
setVersion(other.getVersion());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
@@ -435,64 +347,52 @@ public final class FSProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
version_ = input.readBytes();
break;
}
}
}
}
private int bitField0_;
// required string version = 1;
private java.lang.Object version_ = "";
/**
* <code>required string version = 1;</code>
*/
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required string version = 1;</code>
*/
public java.lang.String getVersion() {
public String getVersion() {
java.lang.Object ref = version_;
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
if (!(ref instanceof String)) {
String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
version_ = s;
return s;
} else {
return (java.lang.String) ref;
return (String) ref;
}
}
/**
* <code>required string version = 1;</code>
*/
public com.google.protobuf.ByteString
getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
version_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string version = 1;</code>
*/
public Builder setVersion(
java.lang.String value) {
public Builder setVersion(String value) {
if (value == null) {
throw new NullPointerException();
}
@@ -501,27 +401,16 @@ public final class FSProtos {
onChanged();
return this;
}
/**
* <code>required string version = 1;</code>
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
version_ = getDefaultInstance().getVersion();
onChanged();
return this;
}
/**
* <code>required string version = 1;</code>
*/
public Builder setVersionBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
void setVersion(com.google.protobuf.ByteString value) {
bitField0_ |= 0x00000001;
version_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:HBaseVersionFileContent)
@@ -539,42 +428,21 @@ public final class FSProtos {
extends com.google.protobuf.MessageOrBuilder {
// required bytes splitkey = 1;
/**
* <code>required bytes splitkey = 1;</code>
*/
boolean hasSplitkey();
/**
* <code>required bytes splitkey = 1;</code>
*/
com.google.protobuf.ByteString getSplitkey();
// required .Reference.Range range = 2;
/**
* <code>required .Reference.Range range = 2;</code>
*/
boolean hasRange();
/**
* <code>required .Reference.Range range = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange();
}
/**
* Protobuf type {@code Reference}
*
* <pre>
**
* Reference file content used when we split an hfile under a region.
* </pre>
*/
public static final class Reference extends
com.google.protobuf.GeneratedMessage
implements ReferenceOrBuilder {
// Use Reference.newBuilder() to construct.
private Reference(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private Reference(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private Reference(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private Reference(boolean noInit) {}
private static final Reference defaultInstance;
public static Reference getDefaultInstance() {
@@ -585,63 +453,6 @@ public final class FSProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Reference(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
splitkey_ = input.readBytes();
break;
}
case 16: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
range_ = value;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor;
@@ -649,48 +460,16 @@ public final class FSProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable;
}
public static com.google.protobuf.Parser<Reference> PARSER =
new com.google.protobuf.AbstractParser<Reference>() {
public Reference parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Reference(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Reference> getParserForType() {
return PARSER;
}
/**
* Protobuf enum {@code Reference.Range}
*/
public enum Range
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>TOP = 0;</code>
*/
TOP(0, 0),
/**
* <code>BOTTOM = 1;</code>
*/
BOTTOM(1, 1),
;
/**
* <code>TOP = 0;</code>
*/
public static final int TOP_VALUE = 0;
/**
* <code>BOTTOM = 1;</code>
*/
public static final int BOTTOM_VALUE = 1;
@@ -729,7 +508,9 @@ public final class FSProtos {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDescriptor().getEnumTypes().get(0);
}
private static final Range[] VALUES = values();
private static final Range[] VALUES = {
TOP, BOTTOM,
};
public static Range valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
@@ -755,15 +536,9 @@ public final class FSProtos {
// required bytes splitkey = 1;
public static final int SPLITKEY_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString splitkey_;
/**
* <code>required bytes splitkey = 1;</code>
*/
public boolean hasSplitkey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes splitkey = 1;</code>
*/
public com.google.protobuf.ByteString getSplitkey() {
return splitkey_;
}
@@ -771,15 +546,9 @@ public final class FSProtos {
// required .Reference.Range range = 2;
public static final int RANGE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_;
/**
* <code>required .Reference.Range range = 2;</code>
*/
public boolean hasRange() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .Reference.Range range = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() {
return range_;
}
@@ -869,12 +638,8 @@ public final class FSProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSplitkey()) {
@@ -886,61 +651,74 @@ public final class FSProtos {
hash = (53 * hash) + hashEnum(getRange());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@@ -956,14 +734,6 @@ public final class FSProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code Reference}
*
* <pre>
**
* Reference file content used when we split an hfile under a region.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder {
@@ -974,9 +744,7 @@ public final class FSProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.newBuilder()
@@ -984,8 +752,7 @@ public final class FSProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -1012,7 +779,7 @@ public final class FSProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getDefaultInstanceForType() {
@@ -1027,6 +794,16 @@ public final class FSProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference(this);
int from_bitField0_ = bitField0_;
@@ -1081,38 +858,55 @@ public final class FSProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
splitkey_ = input.readBytes();
break;
}
case 16: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
range_ = value;
}
break;
}
}
}
}
private int bitField0_;
// required bytes splitkey = 1;
private com.google.protobuf.ByteString splitkey_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes splitkey = 1;</code>
*/
public boolean hasSplitkey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes splitkey = 1;</code>
*/
public com.google.protobuf.ByteString getSplitkey() {
return splitkey_;
}
/**
* <code>required bytes splitkey = 1;</code>
*/
public Builder setSplitkey(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@@ -1122,9 +916,6 @@ public final class FSProtos {
onChanged();
return this;
}
/**
* <code>required bytes splitkey = 1;</code>
*/
public Builder clearSplitkey() {
bitField0_ = (bitField0_ & ~0x00000001);
splitkey_ = getDefaultInstance().getSplitkey();
@@ -1134,21 +925,12 @@ public final class FSProtos {
// required .Reference.Range range = 2;
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP;
/**
* <code>required .Reference.Range range = 2;</code>
*/
public boolean hasRange() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .Reference.Range range = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() {
return range_;
}
/**
* <code>required .Reference.Range range = 2;</code>
*/
public Builder setRange(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value) {
if (value == null) {
throw new NullPointerException();
@@ -1158,9 +940,6 @@ public final class FSProtos {
onChanged();
return this;
}
/**
* <code>required .Reference.Range range = 2;</code>
*/
public Builder clearRange() {
bitField0_ = (bitField0_ & ~0x00000002);
range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP;
@@ -1215,13 +994,17 @@ public final class FSProtos {
internal_static_HBaseVersionFileContent_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_HBaseVersionFileContent_descriptor,
new java.lang.String[] { "Version", });
new java.lang.String[] { "Version", },
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class,
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
internal_static_Reference_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_Reference_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_Reference_descriptor,
new java.lang.String[] { "Splitkey", "Range", });
new java.lang.String[] { "Splitkey", "Range", },
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class,
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class);
return null;
}
};
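
An API consequence of the revert that runs through all of these generated files: the 2.5 code made the raw ByteString accessors and Builder setters public (getClusterIdBytes(), getVersionBytes(), setClusterIdBytes(...)), while the restored 2.4.1 code keeps them private or package-private. Application code wanting raw bytes has to round-trip through String; a sketch against the HBaseVersionFileContent message above:

    HBaseVersionFileContent v = HBaseVersionFileContent.newBuilder()
        .setVersion("8") // the public String setter survives the revert
        .build();
    // No public getVersionBytes() in the 2.4.1-generated code; re-encode instead.
    com.google.protobuf.ByteString raw =
        com.google.protobuf.ByteString.copyFromUtf8(v.getVersion());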

View File

@@ -12,27 +12,17 @@ public final class LoadBalancerProtos {
extends com.google.protobuf.MessageOrBuilder {
// optional bool balancerOn = 1;
/**
* <code>optional bool balancerOn = 1;</code>
*/
boolean hasBalancerOn();
/**
* <code>optional bool balancerOn = 1;</code>
*/
boolean getBalancerOn();
}
/**
* Protobuf type {@code LoadBalancerState}
*/
public static final class LoadBalancerState extends
com.google.protobuf.GeneratedMessage
implements LoadBalancerStateOrBuilder {
// Use LoadBalancerState.newBuilder() to construct.
private LoadBalancerState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private LoadBalancerState(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private LoadBalancerState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private LoadBalancerState(boolean noInit) {}
private static final LoadBalancerState defaultInstance;
public static LoadBalancerState getDefaultInstance() {
@@ -43,52 +33,6 @@ public final class LoadBalancerProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private LoadBalancerState(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
balancerOn_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
@@ -96,39 +40,16 @@ public final class LoadBalancerProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
}
public static com.google.protobuf.Parser<LoadBalancerState> PARSER =
new com.google.protobuf.AbstractParser<LoadBalancerState>() {
public LoadBalancerState parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new LoadBalancerState(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<LoadBalancerState> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable;
}
private int bitField0_;
// optional bool balancerOn = 1;
public static final int BALANCERON_FIELD_NUMBER = 1;
private boolean balancerOn_;
/**
* <code>optional bool balancerOn = 1;</code>
*/
public boolean hasBalancerOn() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bool balancerOn = 1;</code>
*/
public boolean getBalancerOn() {
return balancerOn_;
}
@@ -197,12 +118,8 @@ public final class LoadBalancerProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasBalancerOn()) {
@@ -210,61 +127,74 @@ public final class LoadBalancerProtos {
hash = (53 * hash) + hashBoolean(getBalancerOn());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -280,9 +210,6 @@ public final class LoadBalancerProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code LoadBalancerState}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerStateOrBuilder {
@ -293,9 +220,7 @@ public final class LoadBalancerProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.newBuilder()
@ -303,8 +228,7 @@ public final class LoadBalancerProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -329,7 +253,7 @@ public final class LoadBalancerProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState getDefaultInstanceForType() {
@ -344,6 +268,16 @@ public final class LoadBalancerProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = new org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState(this);
int from_bitField0_ = bitField0_;
@ -383,47 +317,50 @@ public final class LoadBalancerProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
balancerOn_ = input.readBool();
break;
}
}
}
}
private int bitField0_;
// optional bool balancerOn = 1;
private boolean balancerOn_ ;
/**
* <code>optional bool balancerOn = 1;</code>
*/
public boolean hasBalancerOn() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bool balancerOn = 1;</code>
*/
public boolean getBalancerOn() {
return balancerOn_;
}
/**
* <code>optional bool balancerOn = 1;</code>
*/
public Builder setBalancerOn(boolean value) {
bitField0_ |= 0x00000001;
balancerOn_ = value;
onChanged();
return this;
}
/**
* <code>optional bool balancerOn = 1;</code>
*/
public Builder clearBalancerOn() {
bitField0_ = (bitField0_ & ~0x00000001);
balancerOn_ = false;
@ -471,7 +408,9 @@ public final class LoadBalancerProtos {
internal_static_LoadBalancerState_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_LoadBalancerState_descriptor,
new java.lang.String[] { "BalancerOn", });
new java.lang.String[] { "BalancerOn", },
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class,
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
return null;
}
};
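
The LoadBalancerState hunks above follow the pattern repeated throughout this revert: the protobuf 2.5 static PARSER field and the stream-parsing constructor are dropped, and every parseFrom overload goes back to the 2.4.1 builder path (newBuilder().mergeFrom(...).buildParsed()). A minimal sketch of a round trip through the restored public API; the variable names and the serialized input are illustrative, only the generated types and accessors come from the hunks above:

    // Build a LoadBalancerState, serialize it, then re-parse it through the
    // 2.4.1-style static parseFrom, which now delegates internally to
    // newBuilder().mergeFrom(data).buildParsed().
    LoadBalancerProtos.LoadBalancerState state =
        LoadBalancerProtos.LoadBalancerState.newBuilder()
            .setBalancerOn(true)
            .build();
    byte[] bytes = state.toByteArray();
    LoadBalancerProtos.LoadBalancerState reparsed =
        LoadBalancerProtos.LoadBalancerState.parseFrom(bytes);
    boolean on = reparsed.hasBalancerOn() && reparsed.getBalancerOn();

Either parse path throws InvalidProtocolBufferException on malformed input; the difference is purely in which protobuf runtime version the generated code links against.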


@ -12,42 +12,23 @@ public final class MapReduceProtos {
extends com.google.protobuf.MessageOrBuilder {
// repeated .NameInt64Pair metrics = 1;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>
getMetricsList();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index);
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
int getMetricsCount();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index);
}
/**
* Protobuf type {@code ScanMetrics}
*/
public static final class ScanMetrics extends
com.google.protobuf.GeneratedMessage
implements ScanMetricsOrBuilder {
// Use ScanMetrics.newBuilder() to construct.
private ScanMetrics(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private ScanMetrics(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ScanMetrics(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private ScanMetrics(boolean noInit) {}
private static final ScanMetrics defaultInstance;
public static ScanMetrics getDefaultInstance() {
@ -58,58 +39,6 @@ public final class MapReduceProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ScanMetrics(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
metrics_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>();
mutable_bitField0_ |= 0x00000001;
}
metrics_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
metrics_ = java.util.Collections.unmodifiableList(metrics_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
@ -117,57 +46,25 @@ public final class MapReduceProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
}
public static com.google.protobuf.Parser<ScanMetrics> PARSER =
new com.google.protobuf.AbstractParser<ScanMetrics>() {
public ScanMetrics parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ScanMetrics(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ScanMetrics> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable;
}
// repeated .NameInt64Pair metrics = 1;
public static final int METRICS_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> metrics_;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
return metrics_;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList() {
return metrics_;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public int getMetricsCount() {
return metrics_.size();
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
return metrics_.get(index);
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index) {
return metrics_.get(index);
@ -234,12 +131,8 @@ public final class MapReduceProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getMetricsCount() > 0) {
@ -247,61 +140,74 @@ public final class MapReduceProtos {
hash = (53 * hash) + getMetricsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -317,9 +223,6 @@ public final class MapReduceProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code ScanMetrics}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder {
@ -330,9 +233,7 @@ public final class MapReduceProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.newBuilder()
@ -340,8 +241,7 @@ public final class MapReduceProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -371,7 +271,7 @@ public final class MapReduceProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getDefaultInstanceForType() {
@ -386,6 +286,16 @@ public final class MapReduceProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = new org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics(this);
int from_bitField0_ = bitField0_;
@ -451,19 +361,35 @@ public final class MapReduceProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addMetrics(subBuilder.buildPartial());
break;
}
}
}
}
private int bitField0_;
// repeated .NameInt64Pair metrics = 1;
@ -479,9 +405,6 @@ public final class MapReduceProtos {
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder> metricsBuilder_;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
if (metricsBuilder_ == null) {
return java.util.Collections.unmodifiableList(metrics_);
@ -489,9 +412,6 @@ public final class MapReduceProtos {
return metricsBuilder_.getMessageList();
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public int getMetricsCount() {
if (metricsBuilder_ == null) {
return metrics_.size();
@ -499,9 +419,6 @@ public final class MapReduceProtos {
return metricsBuilder_.getCount();
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
if (metricsBuilder_ == null) {
return metrics_.get(index);
@ -509,9 +426,6 @@ public final class MapReduceProtos {
return metricsBuilder_.getMessage(index);
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder setMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
if (metricsBuilder_ == null) {
@ -526,9 +440,6 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder setMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
if (metricsBuilder_ == null) {
@ -540,9 +451,6 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
if (metricsBuilder_ == null) {
if (value == null) {
@ -556,9 +464,6 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
if (metricsBuilder_ == null) {
@ -573,9 +478,6 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
if (metricsBuilder_ == null) {
@ -587,9 +489,6 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
if (metricsBuilder_ == null) {
@ -601,9 +500,6 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addAllMetrics(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> values) {
if (metricsBuilder_ == null) {
@ -615,9 +511,6 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder clearMetrics() {
if (metricsBuilder_ == null) {
metrics_ = java.util.Collections.emptyList();
@ -628,9 +521,6 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder removeMetrics(int index) {
if (metricsBuilder_ == null) {
ensureMetricsIsMutable();
@ -641,16 +531,10 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder getMetricsBuilder(
int index) {
return getMetricsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index) {
if (metricsBuilder_ == null) {
@ -658,9 +542,6 @@ public final class MapReduceProtos {
return metricsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList() {
if (metricsBuilder_ != null) {
@ -669,24 +550,15 @@ public final class MapReduceProtos {
return java.util.Collections.unmodifiableList(metrics_);
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder() {
return getMetricsFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder(
int index) {
return getMetricsFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder>
getMetricsBuilderList() {
return getMetricsFieldBuilder().getBuilderList();
@ -746,7 +618,9 @@ public final class MapReduceProtos {
internal_static_ScanMetrics_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ScanMetrics_descriptor,
new java.lang.String[] { "Metrics", });
new java.lang.String[] { "Metrics", },
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class,
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
return null;
}
};
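
The ScanMetrics hunks restore the same builder-based parsing for a message with a repeated field: the Builder's mergeFrom now reads each .NameInt64Pair through a sub-builder (newBuilder() / readMessage / buildPartial) instead of the 2.5 PARSER constant. A short sketch of populating and re-reading the repeated metrics field; the metric name and value are made up for illustration, and the NameInt64Pair setters are assumed from its name/value fields, while the ScanMetrics accessors are the ones declared above:

    // Build a ScanMetrics with one NameInt64Pair entry, round-trip it,
    // and walk the repeated field with the generated list accessor.
    MapReduceProtos.ScanMetrics metrics =
        MapReduceProtos.ScanMetrics.newBuilder()
            .addMetrics(HBaseProtos.NameInt64Pair.newBuilder()
                .setName("RPC_CALLS")   // assumed optional string field
                .setValue(42L))         // assumed optional int64 field
            .build();
    MapReduceProtos.ScanMetrics reparsed =
        MapReduceProtos.ScanMetrics.parseFrom(metrics.toByteArray());
    for (HBaseProtos.NameInt64Pair pair : reparsed.getMetricsList()) {
      long v = pair.getValue();
    }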


@ -11,18 +11,14 @@ public final class MasterProtos {
public interface IsMasterRunningRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code IsMasterRunningRequest}
*/
public static final class IsMasterRunningRequest extends
com.google.protobuf.GeneratedMessage
implements IsMasterRunningRequestOrBuilder {
// Use IsMasterRunningRequest.newBuilder() to construct.
private IsMasterRunningRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private IsMasterRunningRequest(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private IsMasterRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private IsMasterRunningRequest(boolean noInit) {}
private static final IsMasterRunningRequest defaultInstance;
public static IsMasterRunningRequest getDefaultInstance() {
@ -33,46 +29,6 @@ public final class MasterProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private IsMasterRunningRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor;
@ -80,24 +36,7 @@ public final class MasterProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class);
}
public static com.google.protobuf.Parser<IsMasterRunningRequest> PARSER =
new com.google.protobuf.AbstractParser<IsMasterRunningRequest>() {
public IsMasterRunningRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new IsMasterRunningRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<IsMasterRunningRequest> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable;
}
private void initFields() {
@ -151,70 +90,79 @@ public final class MasterProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -230,9 +178,6 @@ public final class MasterProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code IsMasterRunningRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequestOrBuilder {
@ -243,9 +188,7 @@ public final class MasterProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.newBuilder()
@ -253,8 +196,7 @@ public final class MasterProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -277,7 +219,7 @@ public final class MasterProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest getDefaultInstanceForType() {
@ -292,6 +234,16 @@ public final class MasterProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest(this);
onBuilt();
@ -321,19 +273,29 @@ public final class MasterProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
}
}
}
// @@protoc_insertion_point(builder_scope:IsMasterRunningRequest)
}
@ -350,27 +312,17 @@ public final class MasterProtos {
extends com.google.protobuf.MessageOrBuilder {
// required bool isMasterRunning = 1;
/**
* <code>required bool isMasterRunning = 1;</code>
*/
boolean hasIsMasterRunning();
/**
* <code>required bool isMasterRunning = 1;</code>
*/
boolean getIsMasterRunning();
}
/**
* Protobuf type {@code IsMasterRunningResponse}
*/
public static final class IsMasterRunningResponse extends
com.google.protobuf.GeneratedMessage
implements IsMasterRunningResponseOrBuilder {
// Use IsMasterRunningResponse.newBuilder() to construct.
private IsMasterRunningResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private IsMasterRunningResponse(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private IsMasterRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private IsMasterRunningResponse(boolean noInit) {}
private static final IsMasterRunningResponse defaultInstance;
public static IsMasterRunningResponse getDefaultInstance() {
@ -381,52 +333,6 @@ public final class MasterProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private IsMasterRunningResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
isMasterRunning_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor;
@ -434,39 +340,16 @@ public final class MasterProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class);
}
public static com.google.protobuf.Parser<IsMasterRunningResponse> PARSER =
new com.google.protobuf.AbstractParser<IsMasterRunningResponse>() {
public IsMasterRunningResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new IsMasterRunningResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<IsMasterRunningResponse> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable;
}
private int bitField0_;
// required bool isMasterRunning = 1;
public static final int ISMASTERRUNNING_FIELD_NUMBER = 1;
private boolean isMasterRunning_;
/**
* <code>required bool isMasterRunning = 1;</code>
*/
public boolean hasIsMasterRunning() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bool isMasterRunning = 1;</code>
*/
public boolean getIsMasterRunning() {
return isMasterRunning_;
}
@ -539,12 +422,8 @@ public final class MasterProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasIsMasterRunning()) {
@ -552,61 +431,74 @@ public final class MasterProtos {
hash = (53 * hash) + hashBoolean(getIsMasterRunning());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -622,9 +514,6 @@ public final class MasterProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code IsMasterRunningResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponseOrBuilder {
@ -635,9 +524,7 @@ public final class MasterProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.newBuilder()
@ -645,8 +532,7 @@ public final class MasterProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -671,7 +557,7 @@ public final class MasterProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse getDefaultInstanceForType() {
@ -686,6 +572,16 @@ public final class MasterProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse(this);
int from_bitField0_ = bitField0_;
@ -729,47 +625,50 @@ public final class MasterProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
isMasterRunning_ = input.readBool();
break;
}
}
}
}
private int bitField0_;
// required bool isMasterRunning = 1;
private boolean isMasterRunning_ ;
/**
* <code>required bool isMasterRunning = 1;</code>
*/
public boolean hasIsMasterRunning() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bool isMasterRunning = 1;</code>
*/
public boolean getIsMasterRunning() {
return isMasterRunning_;
}
/**
* <code>required bool isMasterRunning = 1;</code>
*/
public Builder setIsMasterRunning(boolean value) {
bitField0_ |= 0x00000001;
isMasterRunning_ = value;
onChanged();
return this;
}
/**
* <code>required bool isMasterRunning = 1;</code>
*/
public Builder clearIsMasterRunning() {
bitField0_ = (bitField0_ & ~0x00000001);
isMasterRunning_ = false;
@ -788,21 +687,11 @@ public final class MasterProtos {
// @@protoc_insertion_point(class_scope:IsMasterRunningResponse)
}
/**
* Protobuf service {@code MasterService}
*/
public static abstract class MasterService
implements com.google.protobuf.Service {
protected MasterService() {}
public interface Interface {
/**
* <code>rpc isMasterRunning(.IsMasterRunningRequest) returns (.IsMasterRunningResponse);</code>
*
* <pre>
** return true if master is available
* </pre>
*/
public abstract void isMasterRunning(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
@ -885,13 +774,6 @@ public final class MasterProtos {
};
}
/**
* <code>rpc isMasterRunning(.IsMasterRunningRequest) returns (.IsMasterRunningResponse);</code>
*
* <pre>
** return true if master is available
* </pre>
*/
public abstract void isMasterRunning(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
@ -1024,8 +906,6 @@ public final class MasterProtos {
}
}
// @@protoc_insertion_point(class_scope:MasterService)
}
private static com.google.protobuf.Descriptors.Descriptor
@ -1065,13 +945,17 @@ public final class MasterProtos {
internal_static_IsMasterRunningRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_IsMasterRunningRequest_descriptor,
new java.lang.String[] { });
new java.lang.String[] { },
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class);
internal_static_IsMasterRunningResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_IsMasterRunningResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_IsMasterRunningResponse_descriptor,
new java.lang.String[] { "IsMasterRunning", });
new java.lang.String[] { "IsMasterRunning", },
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class);
return null;
}
};
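
The MasterProtos hunks show the observable edge of this revert most clearly in parseDelimitedFrom: the restored implementation returns null when the builder's mergeDelimitedFrom reports end-of-stream, rather than delegating to the 2.5 PARSER. A sketch of reading a length-delimited stream of IsMasterRunningResponse messages under the restored contract; the in-memory streams are illustrative:

    // Write one delimited message, then read until parseDelimitedFrom
    // signals clean end-of-stream by returning null.
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    MasterProtos.IsMasterRunningResponse.newBuilder()
        .setIsMasterRunning(true)
        .build()
        .writeDelimitedTo(out);
    java.io.InputStream in =
        new java.io.ByteArrayInputStream(out.toByteArray());
    MasterProtos.IsMasterRunningResponse msg;
    while ((msg =
        MasterProtos.IsMasterRunningResponse.parseDelimitedFrom(in)) != null) {
      boolean running = msg.getIsMasterRunning();
    }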


@ -12,42 +12,23 @@ public final class MultiRowMutation {
extends com.google.protobuf.MessageOrBuilder {
// repeated .MutationProto mutationRequest = 1;
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto>
getMutationRequestList();
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index);
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
int getMutationRequestCount();
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationRequestOrBuilderList();
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder(
int index);
}
/**
* Protobuf type {@code MultiMutateRequest}
*/
public static final class MultiMutateRequest extends
com.google.protobuf.GeneratedMessage
implements MultiMutateRequestOrBuilder {
// Use MultiMutateRequest.newBuilder() to construct.
private MultiMutateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private MultiMutateRequest(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiMutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private MultiMutateRequest(boolean noInit) {}
private static final MultiMutateRequest defaultInstance;
public static MultiMutateRequest getDefaultInstance() {
@ -58,58 +39,6 @@ public final class MultiRowMutation {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiMutateRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
mutationRequest_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto>();
mutable_bitField0_ |= 0x00000001;
}
mutationRequest_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
mutationRequest_ = java.util.Collections.unmodifiableList(mutationRequest_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor;
@ -117,57 +46,25 @@ public final class MultiRowMutation {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class);
}
public static com.google.protobuf.Parser<MultiMutateRequest> PARSER =
new com.google.protobuf.AbstractParser<MultiMutateRequest>() {
public MultiMutateRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiMutateRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiMutateRequest> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable;
}
// repeated .MutationProto mutationRequest = 1;
public static final int MUTATIONREQUEST_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> mutationRequest_;
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> getMutationRequestList() {
return mutationRequest_;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationRequestOrBuilderList() {
return mutationRequest_;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public int getMutationRequestCount() {
return mutationRequest_.size();
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) {
return mutationRequest_.get(index);
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder(
int index) {
return mutationRequest_.get(index);
@ -240,12 +137,8 @@ public final class MultiRowMutation {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getMutationRequestCount() > 0) {
@ -253,61 +146,74 @@ public final class MultiRowMutation {
hash = (53 * hash) + getMutationRequestList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -323,9 +229,6 @@ public final class MultiRowMutation {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code MultiMutateRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequestOrBuilder {
@ -336,9 +239,7 @@ public final class MultiRowMutation {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.newBuilder()
@ -346,8 +247,7 @@ public final class MultiRowMutation {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -377,7 +277,7 @@ public final class MultiRowMutation {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest getDefaultInstanceForType() {
@ -392,6 +292,16 @@ public final class MultiRowMutation {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
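      // buildParsed() is the restored 2.4.1 helper backing the static
      // parseFrom() methods above: unlike build(), which throws an unchecked
      // UninitializedMessageException, it converts a missing-required-field
      // failure into a checked InvalidProtocolBufferException so parse errors
      // surface uniformly to callers.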
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest(this);
int from_bitField0_ = bitField0_;
@ -463,19 +373,35 @@ public final class MultiRowMutation {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addMutationRequest(subBuilder.buildPartial());
break;
}
}
}
}
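      // A minimal sketch of the wire-format arithmetic the restored loop above
      // relies on (reader note, not generated code): a protobuf tag packs the
      // field number and wire type as (fieldNumber << 3) | wireType. Field 1
      // (mutationRequest) is length-delimited (wire type 2), hence case 10:
      //
      //   int tag = (1 << 3) | 2;   // == 10, matches "case 10" above
      //
      // Tag 0 is never produced by a writer, so readTag() uses it to signal
      // end-of-stream.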
private int bitField0_;
// repeated .MutationProto mutationRequest = 1;
@ -491,9 +417,6 @@ public final class MultiRowMutation {
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationRequestBuilder_;
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> getMutationRequestList() {
if (mutationRequestBuilder_ == null) {
return java.util.Collections.unmodifiableList(mutationRequest_);
@ -501,9 +424,6 @@ public final class MultiRowMutation {
return mutationRequestBuilder_.getMessageList();
}
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public int getMutationRequestCount() {
if (mutationRequestBuilder_ == null) {
return mutationRequest_.size();
@ -511,9 +431,6 @@ public final class MultiRowMutation {
return mutationRequestBuilder_.getCount();
}
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) {
if (mutationRequestBuilder_ == null) {
return mutationRequest_.get(index);
@ -521,9 +438,6 @@ public final class MultiRowMutation {
return mutationRequestBuilder_.getMessage(index);
}
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public Builder setMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationRequestBuilder_ == null) {
@ -538,9 +452,6 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public Builder setMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
if (mutationRequestBuilder_ == null) {
@ -552,9 +463,6 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public Builder addMutationRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationRequestBuilder_ == null) {
if (value == null) {
@ -568,9 +476,6 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public Builder addMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationRequestBuilder_ == null) {
@ -585,9 +490,6 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public Builder addMutationRequest(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
if (mutationRequestBuilder_ == null) {
@ -599,9 +501,6 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public Builder addMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
if (mutationRequestBuilder_ == null) {
@ -613,9 +512,6 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public Builder addAllMutationRequest(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> values) {
if (mutationRequestBuilder_ == null) {
@ -627,9 +523,6 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public Builder clearMutationRequest() {
if (mutationRequestBuilder_ == null) {
mutationRequest_ = java.util.Collections.emptyList();
@ -640,9 +533,6 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public Builder removeMutationRequest(int index) {
if (mutationRequestBuilder_ == null) {
ensureMutationRequestIsMutable();
@ -653,16 +543,10 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationRequestBuilder(
int index) {
return getMutationRequestFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder(
int index) {
if (mutationRequestBuilder_ == null) {
@ -670,9 +554,6 @@ public final class MultiRowMutation {
return mutationRequestBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationRequestOrBuilderList() {
if (mutationRequestBuilder_ != null) {
@ -681,24 +562,15 @@ public final class MultiRowMutation {
return java.util.Collections.unmodifiableList(mutationRequest_);
}
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder() {
return getMutationRequestFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance());
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder(
int index) {
return getMutationRequestFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance());
}
/**
* <code>repeated .MutationProto mutationRequest = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder>
getMutationRequestBuilderList() {
return getMutationRequestFieldBuilder().getBuilderList();
@ -732,18 +604,14 @@ public final class MultiRowMutation {
public interface MultiMutateResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MultiMutateResponse}
*/
public static final class MultiMutateResponse extends
com.google.protobuf.GeneratedMessage
implements MultiMutateResponseOrBuilder {
// Use MultiMutateResponse.newBuilder() to construct.
private MultiMutateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private MultiMutateResponse(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiMutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private MultiMutateResponse(boolean noInit) {}
private static final MultiMutateResponse defaultInstance;
public static MultiMutateResponse getDefaultInstance() {
@ -754,46 +622,6 @@ public final class MultiRowMutation {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiMutateResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor;
@ -801,24 +629,7 @@ public final class MultiRowMutation {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class);
}
public static com.google.protobuf.Parser<MultiMutateResponse> PARSER =
new com.google.protobuf.AbstractParser<MultiMutateResponse>() {
public MultiMutateResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiMutateResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiMutateResponse> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable;
}
private void initFields() {
@ -872,70 +683,79 @@ public final class MultiRowMutation {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -951,9 +771,6 @@ public final class MultiRowMutation {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code MultiMutateResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponseOrBuilder {
@ -964,9 +781,7 @@ public final class MultiRowMutation {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.newBuilder()
@ -974,8 +789,7 @@ public final class MultiRowMutation {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -998,7 +812,7 @@ public final class MultiRowMutation {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse getDefaultInstanceForType() {
@ -1013,6 +827,16 @@ public final class MultiRowMutation {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse(this);
onBuilt();
@ -1042,19 +866,29 @@ public final class MultiRowMutation {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
}
}
}
// @@protoc_insertion_point(builder_scope:MultiMutateResponse)
}
@ -1067,17 +901,11 @@ public final class MultiRowMutation {
// @@protoc_insertion_point(class_scope:MultiMutateResponse)
}
/**
* Protobuf service {@code MultiRowMutationService}
*/
public static abstract class MultiRowMutationService
implements com.google.protobuf.Service {
protected MultiRowMutationService() {}
public interface Interface {
/**
* <code>rpc mutateRows(.MultiMutateRequest) returns (.MultiMutateResponse);</code>
*/
public abstract void mutateRows(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request,
@ -1160,9 +988,6 @@ public final class MultiRowMutation {
};
}
/**
* <code>rpc mutateRows(.MultiMutateRequest) returns (.MultiMutateResponse);</code>
*/
public abstract void mutateRows(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request,
@ -1295,8 +1120,6 @@ public final class MultiRowMutation {
}
}
// @@protoc_insertion_point(class_scope:MultiRowMutationService)
}
private static com.google.protobuf.Descriptors.Descriptor
@ -1336,13 +1159,17 @@ public final class MultiRowMutation {
internal_static_MultiMutateRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiMutateRequest_descriptor,
new java.lang.String[] { "MutationRequest", });
new java.lang.String[] { "MutationRequest", },
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class);
internal_static_MultiMutateResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_MultiMutateResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiMutateResponse_descriptor,
new java.lang.String[] { });
new java.lang.String[] { },
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class);
return null;
}
};
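// Caller-facing sketch (hypothetical usage, not part of the diff): the static
// parse surface is unchanged by this revert, so client code like the line
// below compiles against both the restored 2.4.1 build and the reverted 2.5
// build; only the internals switch from PARSER.parseFrom(...) to
// newBuilder().mergeFrom(...).buildParsed().
//
//   MultiRowMutation.MultiMutateRequest req =
//       MultiRowMutation.MultiMutateRequest.parseFrom(requestBytes);
//
// where requestBytes is an assumed byte[] holding a serialized request;
// malformed input raises InvalidProtocolBufferException under either build.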


@ -11,18 +11,14 @@ public final class MultiRowMutationProcessorProtos {
public interface MultiRowMutationProcessorRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MultiRowMutationProcessorRequest}
*/
public static final class MultiRowMutationProcessorRequest extends
com.google.protobuf.GeneratedMessage
implements MultiRowMutationProcessorRequestOrBuilder {
// Use MultiRowMutationProcessorRequest.newBuilder() to construct.
private MultiRowMutationProcessorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private MultiRowMutationProcessorRequest(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiRowMutationProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private MultiRowMutationProcessorRequest(boolean noInit) {}
private static final MultiRowMutationProcessorRequest defaultInstance;
public static MultiRowMutationProcessorRequest getDefaultInstance() {
@ -33,46 +29,6 @@ public final class MultiRowMutationProcessorProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiRowMutationProcessorRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
@ -80,24 +36,7 @@ public final class MultiRowMutationProcessorProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
}
public static com.google.protobuf.Parser<MultiRowMutationProcessorRequest> PARSER =
new com.google.protobuf.AbstractParser<MultiRowMutationProcessorRequest>() {
public MultiRowMutationProcessorRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiRowMutationProcessorRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiRowMutationProcessorRequest> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable;
}
private void initFields() {
@ -151,70 +90,79 @@ public final class MultiRowMutationProcessorProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -230,9 +178,6 @@ public final class MultiRowMutationProcessorProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code MultiRowMutationProcessorRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequestOrBuilder {
@ -243,9 +188,7 @@ public final class MultiRowMutationProcessorProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.newBuilder()
@ -253,8 +196,7 @@ public final class MultiRowMutationProcessorProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -277,7 +219,7 @@ public final class MultiRowMutationProcessorProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest getDefaultInstanceForType() {
@ -292,6 +234,16 @@ public final class MultiRowMutationProcessorProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest(this);
onBuilt();
@ -321,19 +273,29 @@ public final class MultiRowMutationProcessorProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
}
}
}
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorRequest)
}
@ -349,18 +311,14 @@ public final class MultiRowMutationProcessorProtos {
public interface MultiRowMutationProcessorResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MultiRowMutationProcessorResponse}
*/
public static final class MultiRowMutationProcessorResponse extends
com.google.protobuf.GeneratedMessage
implements MultiRowMutationProcessorResponseOrBuilder {
// Use MultiRowMutationProcessorResponse.newBuilder() to construct.
private MultiRowMutationProcessorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private MultiRowMutationProcessorResponse(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiRowMutationProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private MultiRowMutationProcessorResponse(boolean noInit) {}
private static final MultiRowMutationProcessorResponse defaultInstance;
public static MultiRowMutationProcessorResponse getDefaultInstance() {
@ -371,46 +329,6 @@ public final class MultiRowMutationProcessorProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiRowMutationProcessorResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
@ -418,24 +336,7 @@ public final class MultiRowMutationProcessorProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
}
public static com.google.protobuf.Parser<MultiRowMutationProcessorResponse> PARSER =
new com.google.protobuf.AbstractParser<MultiRowMutationProcessorResponse>() {
public MultiRowMutationProcessorResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiRowMutationProcessorResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiRowMutationProcessorResponse> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;
}
private void initFields() {
@ -489,70 +390,79 @@ public final class MultiRowMutationProcessorProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -568,9 +478,6 @@ public final class MultiRowMutationProcessorProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code MultiRowMutationProcessorResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponseOrBuilder {
@ -581,9 +488,7 @@ public final class MultiRowMutationProcessorProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.newBuilder()
@ -591,8 +496,7 @@ public final class MultiRowMutationProcessorProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -615,7 +519,7 @@ public final class MultiRowMutationProcessorProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse getDefaultInstanceForType() {
@ -630,6 +534,16 @@ public final class MultiRowMutationProcessorProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse(this);
onBuilt();
@ -659,19 +573,29 @@ public final class MultiRowMutationProcessorProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
}
}
}
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorResponse)
}
@ -719,13 +643,17 @@ public final class MultiRowMutationProcessorProtos {
internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiRowMutationProcessorRequest_descriptor,
new java.lang.String[] { });
new java.lang.String[] { },
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
internal_static_MultiRowMutationProcessorResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiRowMutationProcessorResponse_descriptor,
new java.lang.String[] { });
new java.lang.String[] { },
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
return null;
}
};
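// Usage sketch for the restored parseDelimitedFrom() (hypothetical caller):
// both the 2.4.1 body above (mergeDelimitedFrom() returning false) and the
// 2.5 PARSER path report end-of-stream as null, so a stream of
// length-delimited messages is drained the same way under either build:
//
//   MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest msg;
//   while ((msg = MultiRowMutationProcessorProtos
//       .MultiRowMutationProcessorRequest.parseDelimitedFrom(in)) != null) {
//     // handle msg; "in" is an assumed java.io.InputStream
//   }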


@ -12,45 +12,21 @@ public final class Tracing {
extends com.google.protobuf.MessageOrBuilder {
// optional int64 traceId = 1;
/**
* <code>optional int64 traceId = 1;</code>
*/
boolean hasTraceId();
/**
* <code>optional int64 traceId = 1;</code>
*/
long getTraceId();
// optional int64 parentId = 2;
/**
* <code>optional int64 parentId = 2;</code>
*/
boolean hasParentId();
/**
* <code>optional int64 parentId = 2;</code>
*/
long getParentId();
}
/**
* Protobuf type {@code RPCTInfo}
*
* <pre>
*Used to pass through the information necessary to continue
*a trace after an RPC is made. All we need is the traceid
*(so we know the overarching trace this message is a part of), and
*the id of the current span when this message was sent, so we know
*what span caused the new span we will create when this message is received.
* </pre>
*/
public static final class RPCTInfo extends
com.google.protobuf.GeneratedMessage
implements RPCTInfoOrBuilder {
// Use RPCTInfo.newBuilder() to construct.
private RPCTInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private RPCTInfo(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private RPCTInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private RPCTInfo(boolean noInit) {}
private static final RPCTInfo defaultInstance;
public static RPCTInfo getDefaultInstance() {
@ -61,57 +37,6 @@ public final class Tracing {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private RPCTInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
traceId_ = input.readInt64();
break;
}
case 16: {
bitField0_ |= 0x00000002;
parentId_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
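    // The two non-zero cases in the constructor above follow the same tag
    // arithmetic, (fieldNumber << 3) | wireType, with wire type 0 (varint)
    // for int64: traceId is field 1, so (1 << 3) | 0 == 8; parentId is
    // field 2, so (2 << 3) | 0 == 16.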
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
@ -119,39 +44,16 @@ public final class Tracing {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
}
public static com.google.protobuf.Parser<RPCTInfo> PARSER =
new com.google.protobuf.AbstractParser<RPCTInfo>() {
public RPCTInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RPCTInfo(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RPCTInfo> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable;
}
private int bitField0_;
// optional int64 traceId = 1;
public static final int TRACEID_FIELD_NUMBER = 1;
private long traceId_;
/**
* <code>optional int64 traceId = 1;</code>
*/
public boolean hasTraceId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 traceId = 1;</code>
*/
public long getTraceId() {
return traceId_;
}
@ -159,15 +61,9 @@ public final class Tracing {
// optional int64 parentId = 2;
public static final int PARENTID_FIELD_NUMBER = 2;
private long parentId_;
/**
* <code>optional int64 parentId = 2;</code>
*/
public boolean hasParentId() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 parentId = 2;</code>
*/
public long getParentId() {
return parentId_;
}
@ -249,12 +145,8 @@ public final class Tracing {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasTraceId()) {
@ -266,61 +158,74 @@ public final class Tracing {
hash = (53 * hash) + hashLong(getParentId());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -336,17 +241,6 @@ public final class Tracing {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code RPCTInfo}
*
* <pre>
*Used to pass through the information necessary to continue
*a trace after an RPC is made. All we need is the traceid
*(so we know the overarching trace this message is a part of), and
*the id of the current span when this message was sent, so we know
*what span caused the new span we will create when this message is received.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder {
@ -357,9 +251,7 @@ public final class Tracing {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder()
@ -367,8 +259,7 @@ public final class Tracing {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -395,7 +286,7 @@ public final class Tracing {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDescriptor();
}
public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getDefaultInstanceForType() {
@ -410,6 +301,16 @@ public final class Tracing {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = new org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo(this);
int from_bitField0_ = bitField0_;
@ -456,47 +357,55 @@ public final class Tracing {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
traceId_ = input.readInt64();
break;
}
case 16: {
bitField0_ |= 0x00000002;
parentId_ = input.readInt64();
break;
}
}
}
}
private int bitField0_;
// optional int64 traceId = 1;
private long traceId_ ;
/**
* <code>optional int64 traceId = 1;</code>
*/
public boolean hasTraceId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 traceId = 1;</code>
*/
public long getTraceId() {
return traceId_;
}
/**
* <code>optional int64 traceId = 1;</code>
*/
public Builder setTraceId(long value) {
bitField0_ |= 0x00000001;
traceId_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 traceId = 1;</code>
*/
public Builder clearTraceId() {
bitField0_ = (bitField0_ & ~0x00000001);
traceId_ = 0L;
@ -506,30 +415,18 @@ public final class Tracing {
// optional int64 parentId = 2;
private long parentId_ ;
/**
* <code>optional int64 parentId = 2;</code>
*/
public boolean hasParentId() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 parentId = 2;</code>
*/
public long getParentId() {
return parentId_;
}
/**
* <code>optional int64 parentId = 2;</code>
*/
public Builder setParentId(long value) {
bitField0_ |= 0x00000002;
parentId_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 parentId = 2;</code>
*/
public Builder clearParentId() {
bitField0_ = (bitField0_ & ~0x00000002);
parentId_ = 0L;
@ -577,7 +474,9 @@ public final class Tracing {
internal_static_RPCTInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_RPCTInfo_descriptor,
new java.lang.String[] { "TraceId", "ParentId", });
new java.lang.String[] { "TraceId", "ParentId", },
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class,
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
return null;
}
};
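
With the synthesized PARSER gone, RPCTInfo parsing again runs through Builder.mergeFrom and its hand-rolled tag switch, as restored above. A self-contained sketch of the same wire-level loop for RPCTInfo's two int64 fields (tag 8 is field 1 as a varint, tag 16 is field 2; the class and field names here are illustrative, not the generated code):

import com.google.protobuf.CodedInputStream;
import java.io.IOException;

final class TagLoopSketch {
  long traceId;
  long parentId;

  void readFrom(byte[] wire) throws IOException {
    CodedInputStream in = CodedInputStream.newInstance(wire);
    while (true) {
      int tag = in.readTag();
      switch (tag) {
        case 0:                       // end of stream
          return;
        case 8:                       // field 1, varint: traceId
          traceId = in.readInt64();
          break;
        case 16:                      // field 2, varint: parentId
          parentId = in.readInt64();
          break;
        default:                      // unknown field: skip it
          if (!in.skipField(tag)) {
            return;
          }
      }
    }
  }
}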


@ -348,17 +348,19 @@ service MasterAdminService {
/**
* Create a snapshot for the given table.
* @param snapshot description of the snapshot to take
*/
rpc snapshot(TakeSnapshotRequest) returns(TakeSnapshotResponse);
/**
* List completed snapshots.
* Return a list of snapshot descriptors for completed snapshots
* @return a list of snapshot descriptors for completed snapshots
*/
rpc getCompletedSnapshots(ListSnapshotRequest) returns(ListSnapshotResponse);
/**
* Delete an existing snapshot. This method can also be used to clean up an aborted snapshot.
* @param snapshotName snapshot to delete
*/
rpc deleteSnapshot(DeleteSnapshotRequest) returns(DeleteSnapshotResponse);
@ -369,6 +371,7 @@ service MasterAdminService {
/**
* Restore a snapshot
* @param snapshot description of the snapshot to restore
*/
rpc restoreSnapshot(RestoreSnapshotRequest) returns(RestoreSnapshotResponse);
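
The javadoc fixes above restore the @param/@return conventions around the snapshot RPCs. Since 2.4.1-generated services are plain com.google.protobuf.Service implementations, a hedged sketch of driving the snapshot method through the generic Service interface (the controller and service wiring is illustrative only; HBase's actual client plumbing differs):

import com.google.protobuf.Descriptors;
import com.google.protobuf.Message;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;

final class SnapshotCall {
  // Looks up the "snapshot" method on a generated service instance and
  // invokes it with a prebuilt request message passed in as 'request'.
  static void callSnapshot(Service master, RpcController controller, Message request) {
    Descriptors.MethodDescriptor method =
        master.getDescriptorForType().findMethodByName("snapshot");
    master.callMethod(method, controller, request, new RpcCallback<Message>() {
      @Override
      public void run(Message response) {
        System.out.println("snapshot RPC returned: " + response);
      }
    });
  }
}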


@ -119,7 +119,7 @@ import com.google.common.base.Function;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import com.google.protobuf.Message.Builder;
import com.google.protobuf.TextFormat;
// Uses Writables doing sasl
@ -1673,7 +1673,8 @@ public abstract class HBaseServer implements RpcServer {
CodedInputStream cis = CodedInputStream.newInstance(buf, offset, buf.length);
int headerSize = cis.readRawVarint32();
offset = cis.getTotalBytesRead();
RequestHeader header = RequestHeader.PARSER.parseFrom(buf, offset, headerSize);
RequestHeader header =
RequestHeader.newBuilder().mergeFrom(buf, offset, headerSize).build();
offset += headerSize;
int id = header.getCallId();
if (LOG.isDebugEnabled()) {
@ -1700,13 +1701,15 @@ public abstract class HBaseServer implements RpcServer {
Message m = methodCache.getMethodArgType(method);
// Check that there is a param to deserialize.
if (m != null) {
Parser<? extends Message> parser = m.getParserForType();
Builder builder = null;
builder = m.newBuilderForType();
// To read the varint, I need an inputstream; might as well be a CIS.
cis = CodedInputStream.newInstance(buf, offset, buf.length);
int paramSize = cis.readRawVarint32();
offset += cis.getTotalBytesRead();
if (parser != null) {
param = parser.parseFrom(buf, offset, paramSize);
if (builder != null) {
builder.mergeFrom(buf, offset, paramSize);
param = builder.build();
}
offset += paramSize;
}
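
The HBaseServer change above swaps the 2.5 Parser lookup for a Message.Builder obtained from the method's prototype message. A compact sketch of that varint-length-prefixed read as a standalone helper (names assumed):

import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Message;
import java.io.IOException;

final class ParamReader {
  static Message readParam(Message prototype, byte[] buf, int offset) throws IOException {
    // Read the varint length prefix; the CodedInputStream tracks how many
    // bytes the prefix itself consumed.
    CodedInputStream cis = CodedInputStream.newInstance(buf, offset, buf.length - offset);
    int paramSize = cis.readRawVarint32();
    offset += cis.getTotalBytesRead();
    // Merge exactly paramSize bytes into a builder for the expected type.
    Message.Builder builder = prototype.newBuilderForType();
    builder.mergeFrom(buf, offset, paramSize);
    return builder.build();
  }
}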


@ -184,6 +184,7 @@ import com.google.protobuf.Service;
@InterfaceAudience.Private
public class HRegion implements HeapSize { // , Writable{
public static final Log LOG = LogFactory.getLog(HRegion.class);
private static final String MERGEDIR = ".merges";
public static final String LOAD_CFS_ON_DEMAND_CONFIG_KEY =
"hbase.hregion.scan.loadColumnFamiliesOnDemand";


@ -12,65 +12,29 @@ public final class CellMessage {
extends com.google.protobuf.MessageOrBuilder {
// optional bytes row = 1;
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
boolean hasRow();
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
com.google.protobuf.ByteString getRow();
// optional bytes column = 2;
/**
* <code>optional bytes column = 2;</code>
*/
boolean hasColumn();
/**
* <code>optional bytes column = 2;</code>
*/
com.google.protobuf.ByteString getColumn();
// optional int64 timestamp = 3;
/**
* <code>optional int64 timestamp = 3;</code>
*/
boolean hasTimestamp();
/**
* <code>optional int64 timestamp = 3;</code>
*/
long getTimestamp();
// optional bytes data = 4;
/**
* <code>optional bytes data = 4;</code>
*/
boolean hasData();
/**
* <code>optional bytes data = 4;</code>
*/
com.google.protobuf.ByteString getData();
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Cell}
*/
public static final class Cell extends
com.google.protobuf.GeneratedMessage
implements CellOrBuilder {
// Use Cell.newBuilder() to construct.
private Cell(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private Cell(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private Cell(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private Cell(boolean noInit) {}
private static final Cell defaultInstance;
public static Cell getDefaultInstance() {
@ -81,67 +45,6 @@ public final class CellMessage {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Cell(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
row_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
column_ = input.readBytes();
break;
}
case 24: {
bitField0_ |= 0x00000004;
timestamp_ = input.readInt64();
break;
}
case 34: {
bitField0_ |= 0x00000008;
data_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
@ -149,47 +52,16 @@ public final class CellMessage {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
}
public static com.google.protobuf.Parser<Cell> PARSER =
new com.google.protobuf.AbstractParser<Cell>() {
public Cell parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Cell(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Cell> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable;
}
private int bitField0_;
// optional bytes row = 1;
public static final int ROW_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString row_;
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public com.google.protobuf.ByteString getRow() {
return row_;
}
@ -197,15 +69,9 @@ public final class CellMessage {
// optional bytes column = 2;
public static final int COLUMN_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString column_;
/**
* <code>optional bytes column = 2;</code>
*/
public boolean hasColumn() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes column = 2;</code>
*/
public com.google.protobuf.ByteString getColumn() {
return column_;
}
@ -213,15 +79,9 @@ public final class CellMessage {
// optional int64 timestamp = 3;
public static final int TIMESTAMP_FIELD_NUMBER = 3;
private long timestamp_;
/**
* <code>optional int64 timestamp = 3;</code>
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional int64 timestamp = 3;</code>
*/
public long getTimestamp() {
return timestamp_;
}
@ -229,15 +89,9 @@ public final class CellMessage {
// optional bytes data = 4;
public static final int DATA_FIELD_NUMBER = 4;
private com.google.protobuf.ByteString data_;
/**
* <code>optional bytes data = 4;</code>
*/
public boolean hasData() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional bytes data = 4;</code>
*/
public com.google.protobuf.ByteString getData() {
return data_;
}
@ -312,54 +166,68 @@ public final class CellMessage {
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -375,9 +243,6 @@ public final class CellMessage {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Cell}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.CellOrBuilder {
@ -388,9 +253,7 @@ public final class CellMessage {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.newBuilder()
@ -398,8 +261,7 @@ public final class CellMessage {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -430,7 +292,7 @@ public final class CellMessage {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.getDescriptor();
}
public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell getDefaultInstanceForType() {
@ -445,6 +307,16 @@ public final class CellMessage {
return result;
}
private org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell buildPartial() {
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell result = new org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell(this);
int from_bitField0_ = bitField0_;
@ -505,50 +377,59 @@ public final class CellMessage {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
row_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
column_ = input.readBytes();
break;
}
case 24: {
bitField0_ |= 0x00000004;
timestamp_ = input.readInt64();
break;
}
case 34: {
bitField0_ |= 0x00000008;
data_ = input.readBytes();
break;
}
}
}
}
private int bitField0_;
// optional bytes row = 1;
private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public com.google.protobuf.ByteString getRow() {
return row_;
}
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public Builder setRow(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@ -558,13 +439,6 @@ public final class CellMessage {
onChanged();
return this;
}
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public Builder clearRow() {
bitField0_ = (bitField0_ & ~0x00000001);
row_ = getDefaultInstance().getRow();
@ -574,21 +448,12 @@ public final class CellMessage {
// optional bytes column = 2;
private com.google.protobuf.ByteString column_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes column = 2;</code>
*/
public boolean hasColumn() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes column = 2;</code>
*/
public com.google.protobuf.ByteString getColumn() {
return column_;
}
/**
* <code>optional bytes column = 2;</code>
*/
public Builder setColumn(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@ -598,9 +463,6 @@ public final class CellMessage {
onChanged();
return this;
}
/**
* <code>optional bytes column = 2;</code>
*/
public Builder clearColumn() {
bitField0_ = (bitField0_ & ~0x00000002);
column_ = getDefaultInstance().getColumn();
@ -610,30 +472,18 @@ public final class CellMessage {
// optional int64 timestamp = 3;
private long timestamp_ ;
/**
* <code>optional int64 timestamp = 3;</code>
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional int64 timestamp = 3;</code>
*/
public long getTimestamp() {
return timestamp_;
}
/**
* <code>optional int64 timestamp = 3;</code>
*/
public Builder setTimestamp(long value) {
bitField0_ |= 0x00000004;
timestamp_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 timestamp = 3;</code>
*/
public Builder clearTimestamp() {
bitField0_ = (bitField0_ & ~0x00000004);
timestamp_ = 0L;
@ -643,21 +493,12 @@ public final class CellMessage {
// optional bytes data = 4;
private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes data = 4;</code>
*/
public boolean hasData() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional bytes data = 4;</code>
*/
public com.google.protobuf.ByteString getData() {
return data_;
}
/**
* <code>optional bytes data = 4;</code>
*/
public Builder setData(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@ -667,9 +508,6 @@ public final class CellMessage {
onChanged();
return this;
}
/**
* <code>optional bytes data = 4;</code>
*/
public Builder clearData() {
bitField0_ = (bitField0_ & ~0x00000008);
data_ = getDefaultInstance().getData();
@ -717,7 +555,9 @@ public final class CellMessage {
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor,
new java.lang.String[] { "Row", "Column", "Timestamp", "Data", });
new java.lang.String[] { "Row", "Column", "Timestamp", "Data", },
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class,
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
return null;
}
};
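
After the revert, the generated Cell keeps the same public surface; only the parse path changes (newBuilder().mergeFrom(...).buildParsed() instead of PARSER). A usage sketch against the generated class above, assuming it is on the classpath:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage;

final class CellRoundTrip {
  public static void main(String[] args) throws Exception {
    CellMessage.Cell cell = CellMessage.Cell.newBuilder()
        .setRow(ByteString.copyFromUtf8("r1"))       // unused if the Cell is in a CellSet
        .setColumn(ByteString.copyFromUtf8("cf:q"))
        .setTimestamp(1L)
        .setData(ByteString.copyFromUtf8("v1"))
        .build();
    byte[] wire = cell.toByteArray();
    CellMessage.Cell copy = CellMessage.Cell.parseFrom(wire);  // builder-backed after the revert
    System.out.println(copy.getTimestamp());                   // prints 1
  }
}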


@ -12,37 +12,18 @@ public final class TableListMessage {
extends com.google.protobuf.MessageOrBuilder {
// repeated string name = 1;
/**
* <code>repeated string name = 1;</code>
*/
java.util.List<java.lang.String>
getNameList();
/**
* <code>repeated string name = 1;</code>
*/
java.util.List<String> getNameList();
int getNameCount();
/**
* <code>repeated string name = 1;</code>
*/
java.lang.String getName(int index);
/**
* <code>repeated string name = 1;</code>
*/
com.google.protobuf.ByteString
getNameBytes(int index);
String getName(int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableList}
*/
public static final class TableList extends
com.google.protobuf.GeneratedMessage
implements TableListOrBuilder {
// Use TableList.newBuilder() to construct.
private TableList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private TableList(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private TableList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private TableList(boolean noInit) {}
private static final TableList defaultInstance;
public static TableList getDefaultInstance() {
@ -53,58 +34,6 @@ public final class TableListMessage {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TableList(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
name_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
name_.add(input.readBytes());
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
name_ = new com.google.protobuf.UnmodifiableLazyStringList(name_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
@ -112,55 +41,22 @@ public final class TableListMessage {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
}
public static com.google.protobuf.Parser<TableList> PARSER =
new com.google.protobuf.AbstractParser<TableList>() {
public TableList parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TableList(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<TableList> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
}
// repeated string name = 1;
public static final int NAME_FIELD_NUMBER = 1;
private com.google.protobuf.LazyStringList name_;
/**
* <code>repeated string name = 1;</code>
*/
public java.util.List<java.lang.String>
public java.util.List<String>
getNameList() {
return name_;
}
/**
* <code>repeated string name = 1;</code>
*/
public int getNameCount() {
return name_.size();
}
/**
* <code>repeated string name = 1;</code>
*/
public java.lang.String getName(int index) {
public String getName(int index) {
return name_.get(index);
}
/**
* <code>repeated string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes(int index) {
return name_.getByteString(index);
}
private void initFields() {
name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
@ -213,54 +109,68 @@ public final class TableListMessage {
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -276,9 +186,6 @@ public final class TableListMessage {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableList}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableListOrBuilder {
@ -289,9 +196,7 @@ public final class TableListMessage {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.newBuilder()
@ -299,8 +204,7 @@ public final class TableListMessage {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -325,7 +229,7 @@ public final class TableListMessage {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDescriptor();
}
public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList getDefaultInstanceForType() {
@ -340,6 +244,16 @@ public final class TableListMessage {
return result;
}
private org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildPartial() {
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList(this);
int from_bitField0_ = bitField0_;
@ -386,19 +300,34 @@ public final class TableListMessage {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
ensureNameIsMutable();
name_.add(input.readBytes());
break;
}
}
}
}
private int bitField0_;
// repeated string name = 1;
@ -409,37 +338,18 @@ public final class TableListMessage {
bitField0_ |= 0x00000001;
}
}
/**
* <code>repeated string name = 1;</code>
*/
public java.util.List<java.lang.String>
public java.util.List<String>
getNameList() {
return java.util.Collections.unmodifiableList(name_);
}
/**
* <code>repeated string name = 1;</code>
*/
public int getNameCount() {
return name_.size();
}
/**
* <code>repeated string name = 1;</code>
*/
public java.lang.String getName(int index) {
public String getName(int index) {
return name_.get(index);
}
/**
* <code>repeated string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes(int index) {
return name_.getByteString(index);
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder setName(
int index, java.lang.String value) {
int index, String value) {
if (value == null) {
throw new NullPointerException();
}
@ -448,11 +358,7 @@ public final class TableListMessage {
onChanged();
return this;
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder addName(
java.lang.String value) {
public Builder addName(String value) {
if (value == null) {
throw new NullPointerException();
}
@ -461,37 +367,23 @@ public final class TableListMessage {
onChanged();
return this;
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder addAllName(
java.lang.Iterable<java.lang.String> values) {
java.lang.Iterable<String> values) {
ensureNameIsMutable();
super.addAll(values, name_);
onChanged();
return this;
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder clearName() {
name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder addNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
void addName(com.google.protobuf.ByteString value) {
ensureNameIsMutable();
name_.add(value);
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableList)
@ -533,7 +425,9 @@ public final class TableListMessage {
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor,
new java.lang.String[] { "Name", });
new java.lang.String[] { "Name", },
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class,
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
return null;
}
};
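
For repeated string fields the revert also narrows the generated API: the ByteString accessors (getNameBytes, addNameBytes) existed only in the 2.5 output, so callers use the String methods. A brief usage sketch for the TableList above, assuming the generated class is on the classpath:

import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage;

final class TableListSketch {
  public static void main(String[] args) throws Exception {
    TableListMessage.TableList list = TableListMessage.TableList.newBuilder()
        .addName("t1")
        .addName("t2")
        .build();
    TableListMessage.TableList copy =
        TableListMessage.TableList.parseFrom(list.toByteArray());
    for (String name : copy.getNameList()) {  // List<String> accessor survives the revert
      System.out.println(name);
    }
  }
}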


@ -12,37 +12,21 @@ public final class ColumnAggregationProtos {
extends com.google.protobuf.MessageOrBuilder {
// required bytes family = 1;
/**
* <code>required bytes family = 1;</code>
*/
boolean hasFamily();
/**
* <code>required bytes family = 1;</code>
*/
com.google.protobuf.ByteString getFamily();
// optional bytes qualifier = 2;
/**
* <code>optional bytes qualifier = 2;</code>
*/
boolean hasQualifier();
/**
* <code>optional bytes qualifier = 2;</code>
*/
com.google.protobuf.ByteString getQualifier();
}
/**
* Protobuf type {@code SumRequest}
*/
public static final class SumRequest extends
com.google.protobuf.GeneratedMessage
implements SumRequestOrBuilder {
// Use SumRequest.newBuilder() to construct.
private SumRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private SumRequest(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private SumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private SumRequest(boolean noInit) {}
private static final SumRequest defaultInstance;
public static SumRequest getDefaultInstance() {
@ -53,57 +37,6 @@ public final class ColumnAggregationProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SumRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
family_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
qualifier_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor;
@ -111,39 +44,16 @@ public final class ColumnAggregationProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class);
}
public static com.google.protobuf.Parser<SumRequest> PARSER =
new com.google.protobuf.AbstractParser<SumRequest>() {
public SumRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SumRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SumRequest> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable;
}
private int bitField0_;
// required bytes family = 1;
public static final int FAMILY_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString family_;
/**
* <code>required bytes family = 1;</code>
*/
public boolean hasFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes family = 1;</code>
*/
public com.google.protobuf.ByteString getFamily() {
return family_;
}
@ -151,15 +61,9 @@ public final class ColumnAggregationProtos {
// optional bytes qualifier = 2;
public static final int QUALIFIER_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString qualifier_;
/**
* <code>optional bytes qualifier = 2;</code>
*/
public boolean hasQualifier() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes qualifier = 2;</code>
*/
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
@ -245,12 +149,8 @@ public final class ColumnAggregationProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasFamily()) {
@ -262,61 +162,74 @@ public final class ColumnAggregationProtos {
hash = (53 * hash) + getQualifier().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@ -332,9 +245,6 @@ public final class ColumnAggregationProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code SumRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequestOrBuilder {
@ -345,9 +255,7 @@ public final class ColumnAggregationProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class);
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.newBuilder()
@ -355,8 +263,7 @@ public final class ColumnAggregationProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -383,7 +290,7 @@ public final class ColumnAggregationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.getDescriptor();
}
public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest getDefaultInstanceForType() {
@ -398,6 +305,16 @@ public final class ColumnAggregationProtos {
return result;
}
private org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
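The buildParsed() helper restored above is what turns a structurally valid but uninitialized message (one missing required fields) into an InvalidProtocolBufferException on the parse path. A small sketch of the observable behavior, again assuming the generated classes are on the classpath (RequiredFieldSketch is an illustrative name):

    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest;

    public class RequiredFieldSketch {
      public static void main(String[] args) {
        try {
          // Zero bytes decode cleanly, but the required 'family' field stays
          // unset, so buildParsed() converts the failure into this exception.
          SumRequest.parseFrom(new byte[0]);
          System.out.println("unexpected: parse succeeded");
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          System.out.println(e.getMessage()); // names the missing required field
        }
      }
    }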
public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest buildPartial() {
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest(this);
int from_bitField0_ = bitField0_;
@@ -448,38 +365,49 @@ public final class ColumnAggregationProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
family_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
qualifier_ = input.readBytes();
break;
}
}
}
}
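The case labels in the restored merge loop come straight from the protobuf wire format: a tag is (field_number << 3) | wire_type, so SumRequest's two length-delimited bytes fields (wire type 2) arrive as tags 10 and 18, and tag 0 marks end of input. A quick sketch of that arithmetic (TagSketch is an illustrative name):

    public class TagSketch {
      static int tag(int fieldNumber, int wireType) {
        return (fieldNumber << 3) | wireType;
      }
      public static void main(String[] args) {
        System.out.println(tag(1, 2)); // 10 -> SumRequest.family (bytes)
        System.out.println(tag(2, 2)); // 18 -> SumRequest.qualifier (bytes)
        System.out.println(tag(1, 0)); // 8  -> SumResponse.sum (varint int64)
      }
    }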
private int bitField0_;
// required bytes family = 1;
private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes family = 1;</code>
*/
public boolean hasFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes family = 1;</code>
*/
public com.google.protobuf.ByteString getFamily() {
return family_;
}
/**
* <code>required bytes family = 1;</code>
*/
public Builder setFamily(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@@ -489,9 +417,6 @@ public final class ColumnAggregationProtos {
onChanged();
return this;
}
/**
* <code>required bytes family = 1;</code>
*/
public Builder clearFamily() {
bitField0_ = (bitField0_ & ~0x00000001);
family_ = getDefaultInstance().getFamily();
@@ -501,21 +426,12 @@ public final class ColumnAggregationProtos {
// optional bytes qualifier = 2;
private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes qualifier = 2;</code>
*/
public boolean hasQualifier() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes qualifier = 2;</code>
*/
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
/**
* <code>optional bytes qualifier = 2;</code>
*/
public Builder setQualifier(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@@ -525,9 +441,6 @@ public final class ColumnAggregationProtos {
onChanged();
return this;
}
/**
* <code>optional bytes qualifier = 2;</code>
*/
public Builder clearQualifier() {
bitField0_ = (bitField0_ & ~0x00000002);
qualifier_ = getDefaultInstance().getQualifier();
@@ -550,27 +463,17 @@ public final class ColumnAggregationProtos {
extends com.google.protobuf.MessageOrBuilder {
// required int64 sum = 1;
/**
* <code>required int64 sum = 1;</code>
*/
boolean hasSum();
/**
* <code>required int64 sum = 1;</code>
*/
long getSum();
}
/**
* Protobuf type {@code SumResponse}
*/
public static final class SumResponse extends
com.google.protobuf.GeneratedMessage
implements SumResponseOrBuilder {
// Use SumResponse.newBuilder() to construct.
private SumResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private SumResponse(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private SumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private SumResponse(boolean noInit) {}
private static final SumResponse defaultInstance;
public static SumResponse getDefaultInstance() {
@@ -581,52 +484,6 @@ public final class ColumnAggregationProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SumResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
sum_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor;
@@ -634,39 +491,16 @@ public final class ColumnAggregationProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class);
}
public static com.google.protobuf.Parser<SumResponse> PARSER =
new com.google.protobuf.AbstractParser<SumResponse>() {
public SumResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SumResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SumResponse> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable;
}
private int bitField0_;
// required int64 sum = 1;
public static final int SUM_FIELD_NUMBER = 1;
private long sum_;
/**
* <code>required int64 sum = 1;</code>
*/
public boolean hasSum() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int64 sum = 1;</code>
*/
public long getSum() {
return sum_;
}
@@ -739,12 +573,8 @@ public final class ColumnAggregationProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSum()) {
@@ -752,61 +582,74 @@ public final class ColumnAggregationProtos {
hash = (53 * hash) + hashLong(getSum());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@@ -822,9 +665,6 @@ public final class ColumnAggregationProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code SumResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponseOrBuilder {
@@ -835,9 +675,7 @@ public final class ColumnAggregationProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class);
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.newBuilder()
@@ -845,8 +683,7 @@ public final class ColumnAggregationProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -871,7 +708,7 @@ public final class ColumnAggregationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDescriptor();
}
public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse getDefaultInstanceForType() {
@@ -886,6 +723,16 @@ public final class ColumnAggregationProtos {
return result;
}
private org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse buildPartial() {
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse(this);
int from_bitField0_ = bitField0_;
@@ -929,47 +776,50 @@ public final class ColumnAggregationProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
sum_ = input.readInt64();
break;
}
}
}
}
private int bitField0_;
// required int64 sum = 1;
private long sum_ ;
/**
* <code>required int64 sum = 1;</code>
*/
public boolean hasSum() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int64 sum = 1;</code>
*/
public long getSum() {
return sum_;
}
/**
* <code>required int64 sum = 1;</code>
*/
public Builder setSum(long value) {
bitField0_ |= 0x00000001;
sum_ = value;
onChanged();
return this;
}
/**
* <code>required int64 sum = 1;</code>
*/
public Builder clearSum() {
bitField0_ = (bitField0_ & ~0x00000001);
sum_ = 0L;
@@ -988,17 +838,11 @@ public final class ColumnAggregationProtos {
// @@protoc_insertion_point(class_scope:SumResponse)
}
/**
* Protobuf service {@code ColumnAggregationService}
*/
public static abstract class ColumnAggregationService
implements com.google.protobuf.Service {
protected ColumnAggregationService() {}
public interface Interface {
/**
* <code>rpc sum(.SumRequest) returns (.SumResponse);</code>
*/
public abstract void sum(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request,
@@ -1081,9 +925,6 @@ public final class ColumnAggregationProtos {
};
}
/**
* <code>rpc sum(.SumRequest) returns (.SumResponse);</code>
*/
public abstract void sum(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request,
@@ -1216,8 +1057,6 @@ public final class ColumnAggregationProtos {
}
}
// @@protoc_insertion_point(class_scope:ColumnAggregationService)
}
private static com.google.protobuf.Descriptors.Descriptor
@@ -1257,13 +1096,17 @@ public final class ColumnAggregationProtos {
internal_static_SumRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_SumRequest_descriptor,
new java.lang.String[] { "Family", "Qualifier", });
new java.lang.String[] { "Family", "Qualifier", },
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class);
internal_static_SumResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_SumResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_SumResponse_descriptor,
new java.lang.String[] { "Sum", });
new java.lang.String[] { "Sum", },
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class);
return null;
}
};

View File

@@ -12,27 +12,17 @@ public final class TestDelayedRpcProtos {
extends com.google.protobuf.MessageOrBuilder {
// required bool delay = 1;
/**
* <code>required bool delay = 1;</code>
*/
boolean hasDelay();
/**
* <code>required bool delay = 1;</code>
*/
boolean getDelay();
}
/**
* Protobuf type {@code TestArg}
*/
public static final class TestArg extends
com.google.protobuf.GeneratedMessage
implements TestArgOrBuilder {
// Use TestArg.newBuilder() to construct.
private TestArg(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private TestArg(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private TestArg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private TestArg(boolean noInit) {}
private static final TestArg defaultInstance;
public static TestArg getDefaultInstance() {
@@ -43,52 +33,6 @@ public final class TestDelayedRpcProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TestArg(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
delay_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_descriptor;
@@ -96,39 +40,16 @@ public final class TestDelayedRpcProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.Builder.class);
}
public static com.google.protobuf.Parser<TestArg> PARSER =
new com.google.protobuf.AbstractParser<TestArg>() {
public TestArg parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TestArg(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<TestArg> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable;
}
private int bitField0_;
// required bool delay = 1;
public static final int DELAY_FIELD_NUMBER = 1;
private boolean delay_;
/**
* <code>required bool delay = 1;</code>
*/
public boolean hasDelay() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bool delay = 1;</code>
*/
public boolean getDelay() {
return delay_;
}
@@ -201,12 +122,8 @@ public final class TestDelayedRpcProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasDelay()) {
@@ -214,61 +131,74 @@ public final class TestDelayedRpcProtos {
hash = (53 * hash) + hashBoolean(getDelay());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@@ -284,9 +214,6 @@ public final class TestDelayedRpcProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code TestArg}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArgOrBuilder {
@@ -297,9 +224,7 @@ public final class TestDelayedRpcProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.Builder.class);
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.newBuilder()
@@ -307,8 +232,7 @@ public final class TestDelayedRpcProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -333,7 +257,7 @@ public final class TestDelayedRpcProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_descriptor;
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.getDescriptor();
}
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg getDefaultInstanceForType() {
@@ -348,6 +272,16 @@ public final class TestDelayedRpcProtos {
return result;
}
private org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg buildPartial() {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg(this);
int from_bitField0_ = bitField0_;
@@ -391,47 +325,50 @@ public final class TestDelayedRpcProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
delay_ = input.readBool();
break;
}
}
}
}
private int bitField0_;
// required bool delay = 1;
private boolean delay_ ;
/**
* <code>required bool delay = 1;</code>
*/
public boolean hasDelay() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bool delay = 1;</code>
*/
public boolean getDelay() {
return delay_;
}
/**
* <code>required bool delay = 1;</code>
*/
public Builder setDelay(boolean value) {
bitField0_ |= 0x00000001;
delay_ = value;
onChanged();
return this;
}
/**
* <code>required bool delay = 1;</code>
*/
public Builder clearDelay() {
bitField0_ = (bitField0_ & ~0x00000001);
delay_ = false;
@@ -454,27 +391,17 @@ public final class TestDelayedRpcProtos {
extends com.google.protobuf.MessageOrBuilder {
// required int32 response = 1;
/**
* <code>required int32 response = 1;</code>
*/
boolean hasResponse();
/**
* <code>required int32 response = 1;</code>
*/
int getResponse();
}
/**
* Protobuf type {@code TestResponse}
*/
public static final class TestResponse extends
com.google.protobuf.GeneratedMessage
implements TestResponseOrBuilder {
// Use TestResponse.newBuilder() to construct.
private TestResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
private TestResponse(Builder builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private TestResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private TestResponse(boolean noInit) {}
private static final TestResponse defaultInstance;
public static TestResponse getDefaultInstance() {
@@ -485,52 +412,6 @@ public final class TestDelayedRpcProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TestResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
response_ = input.readInt32();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_descriptor;
@@ -538,39 +419,16 @@ public final class TestDelayedRpcProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.Builder.class);
}
public static com.google.protobuf.Parser<TestResponse> PARSER =
new com.google.protobuf.AbstractParser<TestResponse>() {
public TestResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TestResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<TestResponse> getParserForType() {
return PARSER;
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable;
}
private int bitField0_;
// required int32 response = 1;
public static final int RESPONSE_FIELD_NUMBER = 1;
private int response_;
/**
* <code>required int32 response = 1;</code>
*/
public boolean hasResponse() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int32 response = 1;</code>
*/
public int getResponse() {
return response_;
}
@@ -643,12 +501,8 @@ public final class TestDelayedRpcProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasResponse()) {
@@ -656,61 +510,74 @@ public final class TestDelayedRpcProtos {
hash = (53 * hash) + getResponse();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
public static Builder newBuilder() { return Builder.create(); }
@@ -726,9 +593,6 @@ public final class TestDelayedRpcProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code TestResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponseOrBuilder {
@@ -739,9 +603,7 @@ public final class TestDelayedRpcProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.Builder.class);
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable;
}
// Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.newBuilder()
@@ -749,8 +611,7 @@ public final class TestDelayedRpcProtos {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -775,7 +636,7 @@ public final class TestDelayedRpcProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_descriptor;
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.getDescriptor();
}
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse getDefaultInstanceForType() {
@@ -790,6 +651,16 @@ public final class TestDelayedRpcProtos {
return result;
}
private org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse buildPartial() {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse(this);
int from_bitField0_ = bitField0_;
@@ -833,47 +704,50 @@ public final class TestDelayedRpcProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
response_ = input.readInt32();
break;
}
}
}
}
private int bitField0_;
// required int32 response = 1;
private int response_ ;
/**
* <code>required int32 response = 1;</code>
*/
public boolean hasResponse() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int32 response = 1;</code>
*/
public int getResponse() {
return response_;
}
/**
* <code>required int32 response = 1;</code>
*/
public Builder setResponse(int value) {
bitField0_ |= 0x00000001;
response_ = value;
onChanged();
return this;
}
/**
* <code>required int32 response = 1;</code>
*/
public Builder clearResponse() {
bitField0_ = (bitField0_ & ~0x00000001);
response_ = 0;
@@ -927,13 +801,17 @@ public final class TestDelayedRpcProtos {
internal_static_TestArg_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_TestArg_descriptor,
new java.lang.String[] { "Delay", });
new java.lang.String[] { "Delay", },
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.class,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.Builder.class);
internal_static_TestResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_TestResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_TestResponse_descriptor,
new java.lang.String[] { "Response", });
new java.lang.String[] { "Response", },
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.class,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.Builder.class);
return null;
}
};

View File

@@ -8,38 +8,21 @@ public final class TestRpcServiceProtos {
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
* Protobuf service {@code TestProtobufRpcProto}
*
* <pre>
**
* A protobuf service for use in tests
* </pre>
*/
public static abstract class TestProtobufRpcProto
implements com.google.protobuf.Service {
protected TestProtobufRpcProto() {}
public interface Interface {
/**
* <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
/**
* <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);
/**
* <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
@@ -150,25 +133,16 @@ public final class TestRpcServiceProtos {
};
}
/**
* <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
/**
* <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);
/**
* <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
@@ -383,8 +357,6 @@ public final class TestRpcServiceProtos {
}
}
// @@protoc_insertion_point(class_scope:TestProtobufRpcProto)
}

View File

@@ -1,27 +0,0 @@
These are the protobuf definition files used by tests. The produced Java
classes are generated into src/test/java/org/apache/hadoop/hbase/protobuf/generated
and then checked in. The reasoning is that they change infrequently.
To regenerate the classes after making definition file changes, ensure first that
the protobuf protoc tool is in your $PATH (You may need to download it and build
it first; it's part of the protobuf package obtainable from here:
http://code.google.com/p/protobuf/downloads/list). Then run the following (You
should be able to just copy and paste the below into a terminal and hit return
-- the protoc compiler runs fast):
UNIX_PROTO_DIR=src/test/protobuf
JAVA_DIR=src/test/java/
mkdir -p $JAVA_DIR 2> /dev/null
if which cygpath 2> /dev/null; then
PROTO_DIR=`cygpath --windows $UNIX_PROTO_DIR`
JAVA_DIR=`cygpath --windows $JAVA_DIR`
else
PROTO_DIR=$UNIX_PROTO_DIR
fi
for PROTO_FILE in $UNIX_PROTO_DIR/*.proto
do
protoc -I$PROTO_DIR --java_out=$JAVA_DIR $PROTO_FILE
done
After you've done the above, check in your changes (or post a patch
on a JIRA with your definition file changes and the generated files).

View File

@@ -891,7 +891,7 @@
<slf4j.version>1.4.3</slf4j.version>
<log4j.version>1.2.17</log4j.version>
<mockito-all.version>1.9.0</mockito-all.version>
<protobuf.version>2.5.0</protobuf.version>
<protobuf.version>2.4.1</protobuf.version>
<stax-api.version>1.0.1</stax-api.version>
<thrift.version>0.9.0</thrift.version>
<zookeeper.version>3.4.5</zookeeper.version>
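With protobuf.version back at 2.4.1, the checked-in generated classes and the runtime jar line up again. A hypothetical spot check that the expected protobuf jar is the one actually on the classpath (RuntimeJarSketch is an illustrative name; the calls are standard JDK reflection, not anything from this patch):

    public class RuntimeJarSketch {
      public static void main(String[] args) {
        // Prints the jar com.google.protobuf.Message was loaded from, e.g.
        // .../com/google/protobuf/protobuf-java/2.4.1/protobuf-java-2.4.1.jar
        System.out.println(com.google.protobuf.Message.class
            .getProtectionDomain().getCodeSource().getLocation());
      }
    }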