HBASE-8165 Move to Hadoop 2.1.0-beta from 2.0.x-alpha (WAS: Update our protobuf to 2.5 from 2.4.1)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1516084 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2013-08-21 05:04:20 +00:00
parent 5bbe4dbc0d
commit d663f2baa1
50 changed files with 91692 additions and 48275 deletions
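The common thread through all fifty files is protobuf 2.5's generated-parser API: every message now carries a static PARSER, and call sites drop the builder-based mergeFrom()/build() dance. A minimal before/after sketch, using the ClusterId message regenerated later in this diff (the wrapper class and method names are illustrative, not part of the commit; bytes is assumed to hold a valid serialized ClusterId):

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId;

public class ParserMigrationSketch {
  // protobuf 2.4.1 style: merge into a builder, then build().
  static ClusterId parseOld(byte[] bytes) throws InvalidProtocolBufferException {
    return ClusterId.newBuilder().mergeFrom(bytes).build();
  }

  // protobuf 2.5.0 style: the generated static PARSER does the same work
  // and now also backs every static parseFrom() overload.
  static ClusterId parseNew(byte[] bytes) throws InvalidProtocolBufferException {
    return ClusterId.PARSER.parseFrom(bytes);
  }
}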

View File: ServerName.java

@ -345,7 +345,7 @@ public class ServerName implements Comparable<ServerName>, Serializable {
int prefixLen = ProtobufUtil.lengthOfPBMagic();
try {
MetaRegionServer rss =
MetaRegionServer.newBuilder().mergeFrom(data, prefixLen, data.length - prefixLen).build();
MetaRegionServer.PARSER.parseFrom(data, prefixLen, data.length - prefixLen);
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName sn = rss.getServer();
return new ServerName(sn.getHostName(), sn.getPort(), sn.getStartCode());
} catch (InvalidProtocolBufferException e) {
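The hunk above also exercises the offset/length overload of parseFrom() to skip HBase's PB magic prefix at the front of the znode payload. A sketch of that pattern, assuming the payload begins with the magic bytes and that MetaRegionServer lives in the generated ZooKeeperProtos (import path assumed, not shown in this diff):

import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer;

public class MagicPrefixParseSketch {
  // Sketch: the offset/length overload reads only the message body,
  // leaving the magic marker untouched.
  static MetaRegionServer parse(byte[] data) throws Exception {
    int prefixLen = ProtobufUtil.lengthOfPBMagic();
    return MetaRegionServer.PARSER.parseFrom(data, prefixLen, data.length - prefixLen);
  }
}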

View File: ProtobufUtil.java

@ -132,6 +132,7 @@ import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import com.google.protobuf.RpcChannel;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
@ -2055,19 +2056,21 @@ public final class ProtobufUtil {
}
public static ScanMetrics toScanMetrics(final byte[] bytes) {
MapReduceProtos.ScanMetrics.Builder builder = MapReduceProtos.ScanMetrics.newBuilder();
Parser<MapReduceProtos.ScanMetrics> parser = MapReduceProtos.ScanMetrics.PARSER;
MapReduceProtos.ScanMetrics pScanMetrics = null;
try {
builder.mergeFrom(bytes);
pScanMetrics = parser.parseFrom(bytes);
} catch (InvalidProtocolBufferException e) {
// Ignored: there are just no key values to add.
}
MapReduceProtos.ScanMetrics pScanMetrics = builder.build();
ScanMetrics scanMetrics = new ScanMetrics();
if (pScanMetrics != null) {
for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
if (pair.hasName() && pair.hasValue()) {
scanMetrics.setCounter(pair.getName(), pair.getValue());
}
}
}
return scanMetrics;
}
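toScanMetrics() now parses through the PARSER and treats a failed parse as "no metrics" rather than an error. A sketch of that parse-or-default idiom, factored into a hypothetical helper:

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;

public class ParseOrDefaultSketch {
  // Sketch of the idiom above: a bad or empty payload yields null, and the
  // caller falls back to an empty ScanMetrics instead of propagating the
  // exception.
  static MapReduceProtos.ScanMetrics parseOrNull(byte[] bytes) {
    try {
      return MapReduceProtos.ScanMetrics.PARSER.parseFrom(bytes);
    } catch (InvalidProtocolBufferException e) {
      return null; // no key values to add
    }
  }
}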

View File: RequestConverter.java

@ -824,24 +824,34 @@ public final class RequestConverter {
return builder.build();
}
/**
* @see {@link #buildRollWALWriterRequest()}
*/
private static RollWALWriterRequest ROLL_WAL_WRITER_REQUEST =
RollWALWriterRequest.newBuilder().build();
/**
* Create a new RollWALWriterRequest
*
* @return a RollWALWriterRequest
*/
public static RollWALWriterRequest buildRollWALWriterRequest() {
RollWALWriterRequest.Builder builder = RollWALWriterRequest.newBuilder();
return builder.build();
return ROLL_WAL_WRITER_REQUEST;
}
/**
* @see {@link #buildGetServerInfoRequest()}
*/
private static GetServerInfoRequest GET_SERVER_INFO_REQUEST =
GetServerInfoRequest.newBuilder().build();
/**
* Create a new GetServerInfoRequest
*
* @return a GetServerInfoRequest
*/
public static GetServerInfoRequest buildGetServerInfoRequest() {
GetServerInfoRequest.Builder builder = GetServerInfoRequest.newBuilder();
return builder.build();
return GET_SERVER_INFO_REQUEST;
}
/**
@ -1157,21 +1167,33 @@ public final class RequestConverter {
return SetBalancerRunningRequest.newBuilder().setOn(on).setSynchronous(synchronous).build();
}
/**
* @see {@link #buildGetClusterStatusRequest}
*/
private static final GetClusterStatusRequest GET_CLUSTER_STATUS_REQUEST =
GetClusterStatusRequest.newBuilder().build();
/**
* Creates a protocol buffer GetClusterStatusRequest
*
* @return A GetClusterStatusRequest
*/
public static GetClusterStatusRequest buildGetClusterStatusRequest() {
return GetClusterStatusRequest.newBuilder().build();
return GET_CLUSTER_STATUS_REQUEST;
}
/**
* @see {@link #buildCatalogScanRequest}
*/
private static final CatalogScanRequest CATALOG_SCAN_REQUEST =
CatalogScanRequest.newBuilder().build();
/**
* Creates a request for running a catalog scan
* @return A {@link CatalogScanRequest}
*/
public static CatalogScanRequest buildCatalogScanRequest() {
return CatalogScanRequest.newBuilder().build();
return CATALOG_SCAN_REQUEST;
}
/**
@ -1182,12 +1204,18 @@ public final class RequestConverter {
return EnableCatalogJanitorRequest.newBuilder().setEnable(enable).build();
}
/**
* @see {@link #buildIsCatalogJanitorEnabledRequest()}
*/
private static final IsCatalogJanitorEnabledRequest IS_CATALOG_JANITOR_ENABLED_REQUEST =
IsCatalogJanitorEnabledRequest.newBuilder().build();
/**
* Creates a request for querying the master whether the catalog janitor is enabled
* @return A {@link IsCatalogJanitorEnabledRequest}
*/
public static IsCatalogJanitorEnabledRequest buildIsCatalogJanitorEnabledRequest() {
return IsCatalogJanitorEnabledRequest.newBuilder().build();
return IS_CATALOG_JANITOR_ENABLED_REQUEST;
}
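These RequestConverter changes are an allocation optimization that protobuf immutability makes safe: a request with no per-call fields can be built once and handed out on every call. A sketch of the pattern, using LoadBalancerState from this diff as a stand-in message (the wrapper class is illustrative):

import org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState;

public final class SingletonMessageSketch {
  // Messages are immutable, so one prebuilt instance can be shared, as
  // RequestConverter now does for RollWALWriterRequest, GetServerInfoRequest,
  // GetClusterStatusRequest, CatalogScanRequest and
  // IsCatalogJanitorEnabledRequest.
  private static final LoadBalancerState EMPTY =
      LoadBalancerState.newBuilder().build();

  public static LoadBalancerState buildEmpty() {
    return EMPTY; // returns the shared instance; no per-call allocation
  }
}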
/**

View File: HBaseCommonTestingUtility.java

@ -27,7 +27,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
@ -90,13 +89,24 @@ public class HBaseCommonTestingUtility {
}
String randomStr = UUID.randomUUID().toString();
Path testPath= new Path(getBaseTestDir(), randomStr);
Path testPath = new Path(getBaseTestDir(), randomStr);
this.dataTestDir = new File(testPath.toString()).getAbsoluteFile();
this.dataTestDir.deleteOnExit();
// Set this property so that if mapreduce jobs run, they use this as their home dir.
System.setProperty("test.build.dir", this.dataTestDir.toString());
if (deleteOnExit()) this.dataTestDir.deleteOnExit();
return testPath;
}
/**
* @return True if we should delete testing dirs on exit.
*/
boolean deleteOnExit() {
String v = System.getProperty("hbase.testing.preserve.testdir");
// Let default be true, to delete on exit.
return v == null ? true : !Boolean.parseBoolean(v);
}
/**
* @return True if we removed the test dirs
* @throws IOException
@ -146,7 +156,7 @@ public class HBaseCommonTestingUtility {
return true;
}
try {
FileUtils.deleteDirectory(dir);
if (deleteOnExit()) FileUtils.deleteDirectory(dir);
return true;
} catch (IOException ex) {
LOG.warn("Failed to delete " + dir.getAbsolutePath());
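With the new deleteOnExit() hook, test directories can be kept for post-mortem inspection by setting the property the method reads. A hedged usage sketch (the property name comes from the code above; the wrapper class is illustrative, and the same effect should follow from passing -Dhbase.testing.preserve.testdir=true to the JVM):

public class PreserveTestDirSketch {
  public static void main(String[] args) {
    // Must be set before HBaseCommonTestingUtility creates its dirs.
    System.setProperty("hbase.testing.preserve.testdir", "true");
    // ... run tests; the data test dir now survives JVM exit ...
  }
}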

View File: ClusterIdProtos.java (generated)

@ -12,17 +12,50 @@ public final class ClusterIdProtos {
extends com.google.protobuf.MessageOrBuilder {
// required string cluster_id = 1;
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
boolean hasClusterId();
String getClusterId();
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
java.lang.String getClusterId();
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
com.google.protobuf.ByteString
getClusterIdBytes();
}
/**
* Protobuf type {@code ClusterId}
*
* <pre>
**
* Content of the '/hbase/hbaseid', cluster id, znode.
* Also content of the ${HBASE_ROOTDIR}/hbase.id file.
* </pre>
*/
public static final class ClusterId extends
com.google.protobuf.GeneratedMessage
implements ClusterIdOrBuilder {
// Use ClusterId.newBuilder() to construct.
private ClusterId(Builder builder) {
private ClusterId(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ClusterId(boolean noInit) {}
private ClusterId(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ClusterId defaultInstance;
public static ClusterId getDefaultInstance() {
@ -33,6 +66,52 @@ public final class ClusterIdProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ClusterId(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
clusterId_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
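One behavioral upgrade in this generated parsing constructor: fields the reader's schema does not recognize are collected into unknownFields instead of being dropped, so a parse/serialize round trip is lossless against newer writers. A minimal sketch (wrapper class illustrative; wireBytes assumed to come from a writer whose schema may carry extra fields):

import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId;

public class UnknownFieldsSketch {
  // Unrecognized fields land in getUnknownFields() during parsing and are
  // written back out by toByteArray(), preserving the original payload.
  static byte[] roundTrip(byte[] wireBytes) throws Exception {
    ClusterId parsed = ClusterId.PARSER.parseFrom(wireBytes);
    return parsed.toByteArray();
  }
}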
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
@ -40,35 +119,75 @@ public final class ClusterIdProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
}
public static com.google.protobuf.Parser<ClusterId> PARSER =
new com.google.protobuf.AbstractParser<ClusterId>() {
public ClusterId parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ClusterId(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ClusterId> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string cluster_id = 1;
public static final int CLUSTER_ID_FIELD_NUMBER = 1;
private java.lang.Object clusterId_;
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public boolean hasClusterId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public String getClusterId() {
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public java.lang.String getClusterId() {
java.lang.Object ref = clusterId_;
if (ref instanceof String) {
return (String) ref;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
if (com.google.protobuf.Internal.isValidUtf8(bs)) {
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
clusterId_ = s;
}
return s;
}
}
private com.google.protobuf.ByteString getClusterIdBytes() {
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public com.google.protobuf.ByteString
getClusterIdBytes() {
java.lang.Object ref = clusterId_;
if (ref instanceof String) {
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((String) ref);
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clusterId_ = b;
return b;
} else {
@ -144,8 +263,12 @@ public final class ClusterIdProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasClusterId()) {
@ -153,74 +276,61 @@ public final class ClusterIdProtos {
hash = (53 * hash) + getClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
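Note the delimited variants keep their end-of-stream contract: the removed builder path returned null when mergeDelimitedFrom() hit EOF, and protobuf 2.5's parseDelimitedFrom() likewise returns null at a clean end of stream. A sketch of draining a length-delimited stream (wrapper class illustrative):

import java.io.InputStream;
import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId;

public class DelimitedStreamSketch {
  // parseDelimitedFrom() returns null at end-of-stream, matching the
  // old builder-based behavior this diff replaces.
  static void readAll(InputStream in) throws Exception {
    ClusterId msg;
    while ((msg = ClusterId.parseDelimitedFrom(in)) != null) {
      System.out.println(msg.getClusterId());
    }
  }
}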
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -236,6 +346,15 @@ public final class ClusterIdProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code ClusterId}
*
* <pre>
**
* Content of the '/hbase/hbaseid', cluster id, znode.
* Also content of the ${HBASE_ROOTDIR}/hbase.id file.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder {
@ -246,7 +365,9 @@ public final class ClusterIdProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder()
@ -254,7 +375,8 @@ public final class ClusterIdProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -279,7 +401,7 @@ public final class ClusterIdProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstanceForType() {
@ -294,16 +416,6 @@ public final class ClusterIdProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = new org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId(this);
int from_bitField0_ = bitField0_;
@ -329,7 +441,9 @@ public final class ClusterIdProtos {
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) return this;
if (other.hasClusterId()) {
setClusterId(other.getClusterId());
bitField0_ |= 0x00000001;
clusterId_ = other.clusterId_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
@ -347,52 +461,80 @@ public final class ClusterIdProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
clusterId_ = input.readBytes();
break;
}
}
}
}
private int bitField0_;
// required string cluster_id = 1;
private java.lang.Object clusterId_ = "";
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public boolean hasClusterId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public String getClusterId() {
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public java.lang.String getClusterId() {
java.lang.Object ref = clusterId_;
if (!(ref instanceof String)) {
String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
clusterId_ = s;
return s;
} else {
return (String) ref;
return (java.lang.String) ref;
}
}
public Builder setClusterId(String value) {
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public com.google.protobuf.ByteString
getClusterIdBytes() {
java.lang.Object ref = clusterId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clusterId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public Builder setClusterId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
@ -401,16 +543,35 @@ public final class ClusterIdProtos {
onChanged();
return this;
}
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public Builder clearClusterId() {
bitField0_ = (bitField0_ & ~0x00000001);
clusterId_ = getDefaultInstance().getClusterId();
onChanged();
return this;
}
void setClusterId(com.google.protobuf.ByteString value) {
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public Builder setClusterIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
clusterId_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:ClusterId)
@ -452,9 +613,7 @@ public final class ClusterIdProtos {
internal_static_ClusterId_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ClusterId_descriptor,
new java.lang.String[] { "ClusterId", },
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class,
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
new java.lang.String[] { "ClusterId", });
return null;
}
};

View File: FSProtos.java (generated)

@ -12,17 +12,37 @@ public final class FSProtos {
extends com.google.protobuf.MessageOrBuilder {
// required string version = 1;
/**
* <code>required string version = 1;</code>
*/
boolean hasVersion();
String getVersion();
/**
* <code>required string version = 1;</code>
*/
java.lang.String getVersion();
/**
* <code>required string version = 1;</code>
*/
com.google.protobuf.ByteString
getVersionBytes();
}
/**
* Protobuf type {@code HBaseVersionFileContent}
*
* <pre>
**
* The ${HBASE_ROOTDIR}/hbase.version file content
* </pre>
*/
public static final class HBaseVersionFileContent extends
com.google.protobuf.GeneratedMessage
implements HBaseVersionFileContentOrBuilder {
// Use HBaseVersionFileContent.newBuilder() to construct.
private HBaseVersionFileContent(Builder builder) {
private HBaseVersionFileContent(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private HBaseVersionFileContent(boolean noInit) {}
private HBaseVersionFileContent(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final HBaseVersionFileContent defaultInstance;
public static HBaseVersionFileContent getDefaultInstance() {
@ -33,6 +53,52 @@ public final class FSProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private HBaseVersionFileContent(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
version_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor;
@ -40,35 +106,63 @@ public final class FSProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
}
public static com.google.protobuf.Parser<HBaseVersionFileContent> PARSER =
new com.google.protobuf.AbstractParser<HBaseVersionFileContent>() {
public HBaseVersionFileContent parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new HBaseVersionFileContent(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<HBaseVersionFileContent> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string version = 1;
public static final int VERSION_FIELD_NUMBER = 1;
private java.lang.Object version_;
/**
* <code>required string version = 1;</code>
*/
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public String getVersion() {
/**
* <code>required string version = 1;</code>
*/
public java.lang.String getVersion() {
java.lang.Object ref = version_;
if (ref instanceof String) {
return (String) ref;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
if (com.google.protobuf.Internal.isValidUtf8(bs)) {
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
version_ = s;
}
return s;
}
}
private com.google.protobuf.ByteString getVersionBytes() {
/**
* <code>required string version = 1;</code>
*/
public com.google.protobuf.ByteString
getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof String) {
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((String) ref);
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
version_ = b;
return b;
} else {
@ -144,8 +238,12 @@ public final class FSProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasVersion()) {
@ -153,74 +251,61 @@ public final class FSProtos {
hash = (53 * hash) + getVersion().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -236,6 +321,14 @@ public final class FSProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code HBaseVersionFileContent}
*
* <pre>
**
* The ${HBASE_ROOTDIR}/hbase.version file content
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder {
@ -246,7 +339,9 @@ public final class FSProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder()
@ -254,7 +349,8 @@ public final class FSProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -279,7 +375,7 @@ public final class FSProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_HBaseVersionFileContent_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getDefaultInstanceForType() {
@ -294,16 +390,6 @@ public final class FSProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent(this);
int from_bitField0_ = bitField0_;
@ -329,7 +415,9 @@ public final class FSProtos {
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance()) return this;
if (other.hasVersion()) {
setVersion(other.getVersion());
bitField0_ |= 0x00000001;
version_ = other.version_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
@ -347,52 +435,64 @@ public final class FSProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
version_ = input.readBytes();
break;
}
}
}
}
private int bitField0_;
// required string version = 1;
private java.lang.Object version_ = "";
/**
* <code>required string version = 1;</code>
*/
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public String getVersion() {
/**
* <code>required string version = 1;</code>
*/
public java.lang.String getVersion() {
java.lang.Object ref = version_;
if (!(ref instanceof String)) {
String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
version_ = s;
return s;
} else {
return (String) ref;
return (java.lang.String) ref;
}
}
public Builder setVersion(String value) {
/**
* <code>required string version = 1;</code>
*/
public com.google.protobuf.ByteString
getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
version_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string version = 1;</code>
*/
public Builder setVersion(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
@ -401,16 +501,27 @@ public final class FSProtos {
onChanged();
return this;
}
/**
* <code>required string version = 1;</code>
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
version_ = getDefaultInstance().getVersion();
onChanged();
return this;
}
void setVersion(com.google.protobuf.ByteString value) {
/**
* <code>required string version = 1;</code>
*/
public Builder setVersionBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
version_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:HBaseVersionFileContent)
@ -428,21 +539,42 @@ public final class FSProtos {
extends com.google.protobuf.MessageOrBuilder {
// required bytes splitkey = 1;
/**
* <code>required bytes splitkey = 1;</code>
*/
boolean hasSplitkey();
/**
* <code>required bytes splitkey = 1;</code>
*/
com.google.protobuf.ByteString getSplitkey();
// required .Reference.Range range = 2;
/**
* <code>required .Reference.Range range = 2;</code>
*/
boolean hasRange();
/**
* <code>required .Reference.Range range = 2;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange();
}
/**
* Protobuf type {@code Reference}
*
* <pre>
**
* Reference file content used when we split an hfile under a region.
* </pre>
*/
public static final class Reference extends
com.google.protobuf.GeneratedMessage
implements ReferenceOrBuilder {
// Use Reference.newBuilder() to construct.
private Reference(Builder builder) {
private Reference(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private Reference(boolean noInit) {}
private Reference(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Reference defaultInstance;
public static Reference getDefaultInstance() {
@ -453,6 +585,63 @@ public final class FSProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Reference(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
splitkey_ = input.readBytes();
break;
}
case 16: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
range_ = value;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor;
@ -460,16 +649,48 @@ public final class FSProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class);
}
public static com.google.protobuf.Parser<Reference> PARSER =
new com.google.protobuf.AbstractParser<Reference>() {
public Reference parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Reference(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Reference> getParserForType() {
return PARSER;
}
/**
* Protobuf enum {@code Reference.Range}
*/
public enum Range
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>TOP = 0;</code>
*/
TOP(0, 0),
/**
* <code>BOTTOM = 1;</code>
*/
BOTTOM(1, 1),
;
/**
* <code>TOP = 0;</code>
*/
public static final int TOP_VALUE = 0;
/**
* <code>BOTTOM = 1;</code>
*/
public static final int BOTTOM_VALUE = 1;
@ -508,9 +729,7 @@ public final class FSProtos {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDescriptor().getEnumTypes().get(0);
}
private static final Range[] VALUES = {
TOP, BOTTOM,
};
private static final Range[] VALUES = values();
public static Range valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
@ -536,9 +755,15 @@ public final class FSProtos {
// required bytes splitkey = 1;
public static final int SPLITKEY_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString splitkey_;
/**
* <code>required bytes splitkey = 1;</code>
*/
public boolean hasSplitkey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes splitkey = 1;</code>
*/
public com.google.protobuf.ByteString getSplitkey() {
return splitkey_;
}
@ -546,9 +771,15 @@ public final class FSProtos {
// required .Reference.Range range = 2;
public static final int RANGE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_;
/**
* <code>required .Reference.Range range = 2;</code>
*/
public boolean hasRange() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .Reference.Range range = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() {
return range_;
}
@ -638,8 +869,12 @@ public final class FSProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSplitkey()) {
@ -651,74 +886,61 @@ public final class FSProtos {
hash = (53 * hash) + hashEnum(getRange());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -734,6 +956,14 @@ public final class FSProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code Reference}
*
* <pre>
**
* Reference file content used when we split an hfile under a region.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.FSProtos.ReferenceOrBuilder {
@ -744,7 +974,9 @@ public final class FSProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class, org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.newBuilder()
@ -752,7 +984,8 @@ public final class FSProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -779,7 +1012,7 @@ public final class FSProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.FSProtos.internal_static_Reference_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference getDefaultInstanceForType() {
@ -794,16 +1027,6 @@ public final class FSProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference result = new org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference(this);
int from_bitField0_ = bitField0_;
@ -858,55 +1081,38 @@ public final class FSProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
splitkey_ = input.readBytes();
break;
}
case 16: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
range_ = value;
}
break;
}
}
}
}
private int bitField0_;
// required bytes splitkey = 1;
private com.google.protobuf.ByteString splitkey_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes splitkey = 1;</code>
*/
public boolean hasSplitkey() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes splitkey = 1;</code>
*/
public com.google.protobuf.ByteString getSplitkey() {
return splitkey_;
}
/**
* <code>required bytes splitkey = 1;</code>
*/
public Builder setSplitkey(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@ -916,6 +1122,9 @@ public final class FSProtos {
onChanged();
return this;
}
/**
* <code>required bytes splitkey = 1;</code>
*/
public Builder clearSplitkey() {
bitField0_ = (bitField0_ & ~0x00000001);
splitkey_ = getDefaultInstance().getSplitkey();
@ -925,12 +1134,21 @@ public final class FSProtos {
// required .Reference.Range range = 2;
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP;
/**
* <code>required .Reference.Range range = 2;</code>
*/
public boolean hasRange() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required .Reference.Range range = 2;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range getRange() {
return range_;
}
/**
* <code>required .Reference.Range range = 2;</code>
*/
public Builder setRange(org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range value) {
if (value == null) {
throw new NullPointerException();
@ -940,6 +1158,9 @@ public final class FSProtos {
onChanged();
return this;
}
/**
* <code>required .Reference.Range range = 2;</code>
*/
public Builder clearRange() {
bitField0_ = (bitField0_ & ~0x00000002);
range_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Range.TOP;
@ -994,17 +1215,13 @@ public final class FSProtos {
internal_static_HBaseVersionFileContent_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_HBaseVersionFileContent_descriptor,
new java.lang.String[] { "Version", },
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.class,
org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder.class);
new java.lang.String[] { "Version", });
internal_static_Reference_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_Reference_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_Reference_descriptor,
new java.lang.String[] { "Splitkey", "Range", },
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.class,
org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference.Builder.class);
new java.lang.String[] { "Splitkey", "Range", });
return null;
}
};
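Reference shows why the hand-written buildParsed() methods could go: both of its fields are required, and the 2.5 PARSER performs the same initialization check buildParsed() used to do by hand, throwing if a required field is missing. A round-trip sketch (wrapper class illustrative):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.FSProtos.Reference;

public class ReferenceSketch {
  public static void main(String[] args) throws Exception {
    // Both fields are required; build() throws if either is unset.
    Reference top = Reference.newBuilder()
        .setSplitkey(ByteString.copyFromUtf8("rowkey"))
        .setRange(Reference.Range.TOP)
        .build();
    // The static parseFrom() now delegates to PARSER, which re-checks
    // required fields on the way in.
    Reference copy = Reference.parseFrom(top.toByteArray());
    assert copy.getRange() == Reference.Range.TOP;
  }
}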

View File: LoadBalancerProtos.java (generated)

@ -12,17 +12,27 @@ public final class LoadBalancerProtos {
extends com.google.protobuf.MessageOrBuilder {
// optional bool balancer_on = 1;
/**
* <code>optional bool balancer_on = 1;</code>
*/
boolean hasBalancerOn();
/**
* <code>optional bool balancer_on = 1;</code>
*/
boolean getBalancerOn();
}
/**
* Protobuf type {@code LoadBalancerState}
*/
public static final class LoadBalancerState extends
com.google.protobuf.GeneratedMessage
implements LoadBalancerStateOrBuilder {
// Use LoadBalancerState.newBuilder() to construct.
private LoadBalancerState(Builder builder) {
private LoadBalancerState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private LoadBalancerState(boolean noInit) {}
private LoadBalancerState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final LoadBalancerState defaultInstance;
public static LoadBalancerState getDefaultInstance() {
@ -33,6 +43,52 @@ public final class LoadBalancerProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private LoadBalancerState(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
balancerOn_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
@ -40,16 +96,39 @@ public final class LoadBalancerProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
}
public static com.google.protobuf.Parser<LoadBalancerState> PARSER =
new com.google.protobuf.AbstractParser<LoadBalancerState>() {
public LoadBalancerState parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new LoadBalancerState(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<LoadBalancerState> getParserForType() {
return PARSER;
}
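The static PARSER field above is protobuf 2.5's replacement for the removed newBuilder()/buildParsed() parsing path. A minimal usage sketch (illustrative only, not part of the generated file; `bytes` is assumed to hold a serialized LoadBalancerState):

static boolean balancerOn(byte[] bytes)
    throws com.google.protobuf.InvalidProtocolBufferException {
  LoadBalancerProtos.LoadBalancerState state =
      LoadBalancerProtos.LoadBalancerState.PARSER.parseFrom(bytes);
  // balancer_on is optional, so check presence before reading it.
  return state.hasBalancerOn() && state.getBalancerOn();
}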
private int bitField0_;
// optional bool balancer_on = 1;
public static final int BALANCER_ON_FIELD_NUMBER = 1;
private boolean balancerOn_;
/**
* <code>optional bool balancer_on = 1;</code>
*/
public boolean hasBalancerOn() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bool balancer_on = 1;</code>
*/
public boolean getBalancerOn() {
return balancerOn_;
}
@ -118,8 +197,12 @@ public final class LoadBalancerProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasBalancerOn()) {
@ -127,74 +210,61 @@ public final class LoadBalancerProtos {
hash = (53 * hash) + hashBoolean(getBalancerOn());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
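hashCode() is now memoized: the first call computes and caches the value, and later calls return it unchanged, which is safe because generated messages are immutable. (One benign corner case: a computed hash of exactly 0 is never cached and is simply recomputed per call.) The observable behavior, sketched:

LoadBalancerProtos.LoadBalancerState s =
    LoadBalancerProtos.LoadBalancerState.newBuilder().setBalancerOn(true).build();
int first = s.hashCode();   // computes and stores memoizedHashCode
int again = s.hashCode();   // served from the cache; always equals first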
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
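All of the static parseFrom overloads now delegate to PARSER, and parseDelimitedFrom keeps its old contract of returning null at end of stream. A sketch of draining a stream of length-delimited messages (`in` is an assumed java.io.InputStream):

static int countStates(java.io.InputStream in) throws java.io.IOException {
  int n = 0;
  LoadBalancerProtos.LoadBalancerState msg;
  while ((msg = LoadBalancerProtos.LoadBalancerState.parseDelimitedFrom(in)) != null) {
    n++;  // null signals EOF, as with the pre-2.5 builder-based code
  }
  return n;
}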
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -210,6 +280,9 @@ public final class LoadBalancerProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code LoadBalancerState}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerStateOrBuilder {
@ -220,7 +293,9 @@ public final class LoadBalancerProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.newBuilder()
@ -228,7 +303,8 @@ public final class LoadBalancerProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -253,7 +329,7 @@ public final class LoadBalancerProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState getDefaultInstanceForType() {
@ -268,16 +344,6 @@ public final class LoadBalancerProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = new org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState(this);
int from_bitField0_ = bitField0_;
@ -317,50 +383,47 @@ public final class LoadBalancerProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
balancerOn_ = input.readBool();
break;
}
}
}
}
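Builder.mergeFrom(CodedInputStream, ...) is now a thin wrapper over PARSER.parsePartialFrom: on failure, the finally block above still merges whatever was parsed before rethrowing. The builder entry point itself is unchanged for callers, as in this sketch (illustrative only):

static LoadBalancerProtos.LoadBalancerState merge(byte[] bytes)
    throws java.io.IOException {
  return LoadBalancerProtos.LoadBalancerState.newBuilder()
      .mergeFrom(com.google.protobuf.CodedInputStream.newInstance(bytes),
          com.google.protobuf.ExtensionRegistryLite.getEmptyRegistry())
      .build();
}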
private int bitField0_;
// optional bool balancer_on = 1;
private boolean balancerOn_ ;
/**
* <code>optional bool balancer_on = 1;</code>
*/
public boolean hasBalancerOn() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bool balancer_on = 1;</code>
*/
public boolean getBalancerOn() {
return balancerOn_;
}
/**
* <code>optional bool balancer_on = 1;</code>
*/
public Builder setBalancerOn(boolean value) {
bitField0_ |= 0x00000001;
balancerOn_ = value;
onChanged();
return this;
}
/**
* <code>optional bool balancer_on = 1;</code>
*/
public Builder clearBalancerOn() {
bitField0_ = (bitField0_ & ~0x00000001);
balancerOn_ = false;
@ -408,9 +471,7 @@ public final class LoadBalancerProtos {
internal_static_LoadBalancerState_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_LoadBalancerState_descriptor,
new java.lang.String[] { "BalancerOn", },
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class,
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
new java.lang.String[] { "BalancerOn", });
return null;
}
};


@ -12,23 +12,42 @@ public final class MapReduceProtos {
extends com.google.protobuf.MessageOrBuilder {
// repeated .NameInt64Pair metrics = 1;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>
getMetricsList();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index);
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
int getMetricsCount();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index);
}
/**
* Protobuf type {@code ScanMetrics}
*/
public static final class ScanMetrics extends
com.google.protobuf.GeneratedMessage
implements ScanMetricsOrBuilder {
// Use ScanMetrics.newBuilder() to construct.
private ScanMetrics(Builder builder) {
private ScanMetrics(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ScanMetrics(boolean noInit) {}
private ScanMetrics(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ScanMetrics defaultInstance;
public static ScanMetrics getDefaultInstance() {
@ -39,6 +58,58 @@ public final class MapReduceProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ScanMetrics(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
metrics_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>();
mutable_bitField0_ |= 0x00000001;
}
metrics_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
metrics_ = java.util.Collections.unmodifiableList(metrics_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
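In the constructor parse above, the repeated metrics field accumulates in a mutable ArrayList and is frozen in the finally block, so the message's list getter hands out an unmodifiable view. A round-trip sketch (the counter name is made up for illustration):

static java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>
    roundTrip() throws com.google.protobuf.InvalidProtocolBufferException {
  MapReduceProtos.ScanMetrics in = MapReduceProtos.ScanMetrics.newBuilder()
      .addMetrics(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair
          .newBuilder().setName("ROWS_SCANNED").setValue(42L))  // hypothetical counter
      .build();
  MapReduceProtos.ScanMetrics out =
      MapReduceProtos.ScanMetrics.PARSER.parseFrom(in.toByteArray());
  return out.getMetricsList();  // unmodifiable view; callers must not mutate it
}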
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
@ -46,25 +117,57 @@ public final class MapReduceProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
}
public static com.google.protobuf.Parser<ScanMetrics> PARSER =
new com.google.protobuf.AbstractParser<ScanMetrics>() {
public ScanMetrics parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ScanMetrics(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ScanMetrics> getParserForType() {
return PARSER;
}
// repeated .NameInt64Pair metrics = 1;
public static final int METRICS_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> metrics_;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
return metrics_;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList() {
return metrics_;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public int getMetricsCount() {
return metrics_.size();
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
return metrics_.get(index);
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index) {
return metrics_.get(index);
@ -131,8 +234,12 @@ public final class MapReduceProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getMetricsCount() > 0) {
@ -140,74 +247,61 @@ public final class MapReduceProtos {
hash = (53 * hash) + getMetricsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -223,6 +317,9 @@ public final class MapReduceProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code ScanMetrics}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder {
@ -233,7 +330,9 @@ public final class MapReduceProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.newBuilder()
@ -241,7 +340,8 @@ public final class MapReduceProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -271,7 +371,7 @@ public final class MapReduceProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getDefaultInstanceForType() {
@ -286,16 +386,6 @@ public final class MapReduceProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = new org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics(this);
int from_bitField0_ = bitField0_;
@ -361,35 +451,19 @@ public final class MapReduceProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addMetrics(subBuilder.buildPartial());
break;
}
}
}
}
private int bitField0_;
// repeated .NameInt64Pair metrics = 1;
@ -405,6 +479,9 @@ public final class MapReduceProtos {
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder> metricsBuilder_;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
if (metricsBuilder_ == null) {
return java.util.Collections.unmodifiableList(metrics_);
@ -412,6 +489,9 @@ public final class MapReduceProtos {
return metricsBuilder_.getMessageList();
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public int getMetricsCount() {
if (metricsBuilder_ == null) {
return metrics_.size();
@ -419,6 +499,9 @@ public final class MapReduceProtos {
return metricsBuilder_.getCount();
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
if (metricsBuilder_ == null) {
return metrics_.get(index);
@ -426,6 +509,9 @@ public final class MapReduceProtos {
return metricsBuilder_.getMessage(index);
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder setMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
if (metricsBuilder_ == null) {
@ -440,6 +526,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder setMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
if (metricsBuilder_ == null) {
@ -451,6 +540,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
if (metricsBuilder_ == null) {
if (value == null) {
@ -464,6 +556,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
if (metricsBuilder_ == null) {
@ -478,6 +573,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
if (metricsBuilder_ == null) {
@ -489,6 +587,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
if (metricsBuilder_ == null) {
@ -500,6 +601,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addAllMetrics(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> values) {
if (metricsBuilder_ == null) {
@ -511,6 +615,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder clearMetrics() {
if (metricsBuilder_ == null) {
metrics_ = java.util.Collections.emptyList();
@ -521,6 +628,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder removeMetrics(int index) {
if (metricsBuilder_ == null) {
ensureMetricsIsMutable();
@ -531,10 +641,16 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder getMetricsBuilder(
int index) {
return getMetricsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index) {
if (metricsBuilder_ == null) {
@ -542,6 +658,9 @@ public final class MapReduceProtos {
return metricsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList() {
if (metricsBuilder_ != null) {
@ -550,15 +669,24 @@ public final class MapReduceProtos {
return java.util.Collections.unmodifiableList(metrics_);
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder() {
return getMetricsFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder(
int index) {
return getMetricsFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder>
getMetricsBuilderList() {
return getMetricsFieldBuilder().getBuilderList();
@ -618,9 +746,7 @@ public final class MapReduceProtos {
internal_static_ScanMetrics_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ScanMetrics_descriptor,
new java.lang.String[] { "Metrics", },
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class,
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
new java.lang.String[] { "Metrics", });
return null;
}
};
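The hunks above also document the nested-builder accessors for the repeated field (addMetricsBuilder, getMetricsBuilder, getMetricsBuilderList). A sketch of editing elements in place through them (counter name again hypothetical):

MapReduceProtos.ScanMetrics.Builder b = MapReduceProtos.ScanMetrics.newBuilder();
b.addMetricsBuilder()       // appends an empty NameInt64Pair, returns its builder
    .setName("RPC_CALLS")
    .setValue(7L);
b.getMetricsBuilder(0).setValue(8L);  // mutate the element without rebuilding it
MapReduceProtos.ScanMetrics metrics = b.build();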


@ -11,14 +11,18 @@ public final class MasterProtos {
public interface IsMasterRunningRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code IsMasterRunningRequest}
*/
public static final class IsMasterRunningRequest extends
com.google.protobuf.GeneratedMessage
implements IsMasterRunningRequestOrBuilder {
// Use IsMasterRunningRequest.newBuilder() to construct.
private IsMasterRunningRequest(Builder builder) {
private IsMasterRunningRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private IsMasterRunningRequest(boolean noInit) {}
private IsMasterRunningRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final IsMasterRunningRequest defaultInstance;
public static IsMasterRunningRequest getDefaultInstance() {
@ -29,6 +33,46 @@ public final class MasterProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private IsMasterRunningRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor;
@ -36,7 +80,24 @@ public final class MasterProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class);
}
public static com.google.protobuf.Parser<IsMasterRunningRequest> PARSER =
new com.google.protobuf.AbstractParser<IsMasterRunningRequest>() {
public IsMasterRunningRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new IsMasterRunningRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<IsMasterRunningRequest> getParserForType() {
return PARSER;
}
private void initFields() {
@ -90,79 +151,70 @@ public final class MasterProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -178,6 +230,9 @@ public final class MasterProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code IsMasterRunningRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequestOrBuilder {
@ -188,7 +243,9 @@ public final class MasterProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.newBuilder()
@ -196,7 +253,8 @@ public final class MasterProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -219,7 +277,7 @@ public final class MasterProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest getDefaultInstanceForType() {
@ -234,16 +292,6 @@ public final class MasterProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest(this);
onBuilt();
@ -273,29 +321,19 @@ public final class MasterProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
}
}
}
// @@protoc_insertion_point(builder_scope:IsMasterRunningRequest)
}
@ -312,17 +350,27 @@ public final class MasterProtos {
extends com.google.protobuf.MessageOrBuilder {
// required bool is_master_running = 1;
/**
* <code>required bool is_master_running = 1;</code>
*/
boolean hasIsMasterRunning();
/**
* <code>required bool is_master_running = 1;</code>
*/
boolean getIsMasterRunning();
}
/**
* Protobuf type {@code IsMasterRunningResponse}
*/
public static final class IsMasterRunningResponse extends
com.google.protobuf.GeneratedMessage
implements IsMasterRunningResponseOrBuilder {
// Use IsMasterRunningResponse.newBuilder() to construct.
private IsMasterRunningResponse(Builder builder) {
private IsMasterRunningResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private IsMasterRunningResponse(boolean noInit) {}
private IsMasterRunningResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final IsMasterRunningResponse defaultInstance;
public static IsMasterRunningResponse getDefaultInstance() {
@ -333,6 +381,52 @@ public final class MasterProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private IsMasterRunningResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
isMasterRunning_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor;
@ -340,16 +434,39 @@ public final class MasterProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class);
}
public static com.google.protobuf.Parser<IsMasterRunningResponse> PARSER =
new com.google.protobuf.AbstractParser<IsMasterRunningResponse>() {
public IsMasterRunningResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new IsMasterRunningResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<IsMasterRunningResponse> getParserForType() {
return PARSER;
}
private int bitField0_;
// required bool is_master_running = 1;
public static final int IS_MASTER_RUNNING_FIELD_NUMBER = 1;
private boolean isMasterRunning_;
/**
* <code>required bool is_master_running = 1;</code>
*/
public boolean hasIsMasterRunning() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bool is_master_running = 1;</code>
*/
public boolean getIsMasterRunning() {
return isMasterRunning_;
}
@ -422,8 +539,12 @@ public final class MasterProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasIsMasterRunning()) {
@ -431,74 +552,61 @@ public final class MasterProtos {
hash = (53 * hash) + hashBoolean(getIsMasterRunning());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -514,6 +622,9 @@ public final class MasterProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code IsMasterRunningResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponseOrBuilder {
@ -524,7 +635,9 @@ public final class MasterProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class, org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.newBuilder()
@ -532,7 +645,8 @@ public final class MasterProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -557,7 +671,7 @@ public final class MasterProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.MasterProtos.internal_static_IsMasterRunningResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse getDefaultInstanceForType() {
@ -572,16 +686,6 @@ public final class MasterProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse result = new org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse(this);
int from_bitField0_ = bitField0_;
@ -625,50 +729,47 @@ public final class MasterProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
isMasterRunning_ = input.readBool();
break;
}
}
}
}
private int bitField0_;
// required bool is_master_running = 1;
private boolean isMasterRunning_ ;
/**
* <code>required bool is_master_running = 1;</code>
*/
public boolean hasIsMasterRunning() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bool is_master_running = 1;</code>
*/
public boolean getIsMasterRunning() {
return isMasterRunning_;
}
/**
* <code>required bool is_master_running = 1;</code>
*/
public Builder setIsMasterRunning(boolean value) {
bitField0_ |= 0x00000001;
isMasterRunning_ = value;
onChanged();
return this;
}
/**
* <code>required bool is_master_running = 1;</code>
*/
public Builder clearIsMasterRunning() {
bitField0_ = (bitField0_ & ~0x00000001);
isMasterRunning_ = false;
@ -687,11 +788,21 @@ public final class MasterProtos {
// @@protoc_insertion_point(class_scope:IsMasterRunningResponse)
}
/**
* Protobuf service {@code MasterService}
*/
public static abstract class MasterService
implements com.google.protobuf.Service {
protected MasterService() {}
public interface Interface {
/**
* <code>rpc IsMasterRunning(.IsMasterRunningRequest) returns (.IsMasterRunningResponse);</code>
*
* <pre>
** return true if master is available
* </pre>
*/
public abstract void isMasterRunning(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
@ -774,6 +885,13 @@ public final class MasterProtos {
};
}
/**
* <code>rpc IsMasterRunning(.IsMasterRunningRequest) returns (.IsMasterRunningResponse);</code>
*
* <pre>
** return true if master is available
* </pre>
*/
public abstract void isMasterRunning(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest request,
@ -906,6 +1024,8 @@ public final class MasterProtos {
}
}
// @@protoc_insertion_point(class_scope:MasterService)
}
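MasterService's generated stub factories are untouched by this migration; the visible changes here are javadoc. A call sketch, assuming a com.google.protobuf.BlockingRpcChannel named channel obtained elsewhere (how HBase wires the channel is outside this file):

static boolean masterUp(com.google.protobuf.BlockingRpcChannel channel)
    throws com.google.protobuf.ServiceException {
  MasterProtos.MasterService.BlockingInterface master =
      MasterProtos.MasterService.newBlockingStub(channel);
  return master.isMasterRunning(
      null,  // RpcController; a sketch can pass null
      MasterProtos.IsMasterRunningRequest.getDefaultInstance())
      .getIsMasterRunning();
}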
private static com.google.protobuf.Descriptors.Descriptor
@ -945,17 +1065,13 @@ public final class MasterProtos {
internal_static_IsMasterRunningRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_IsMasterRunningRequest_descriptor,
new java.lang.String[] { },
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.class,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningRequest.Builder.class);
new java.lang.String[] { });
internal_static_IsMasterRunningResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_IsMasterRunningResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_IsMasterRunningResponse_descriptor,
new java.lang.String[] { "IsMasterRunning", },
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.class,
org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsMasterRunningResponse.Builder.class);
new java.lang.String[] { "IsMasterRunning", });
return null;
}
};
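A pattern worth noting across these files: the FieldAccessorTable no longer takes the message and builder classes eagerly; instead internalGetFieldAccessorTable() calls ensureFieldAccessorsInitialized(...), binding the table to the classes on first reflective use. A sketch of an access that exercises it:

static Object reflectiveRead(MasterProtos.IsMasterRunningResponse resp) {
  com.google.protobuf.Descriptors.FieldDescriptor f =
      resp.getDescriptorForType().findFieldByName("is_master_running");
  return resp.getField(f);  // first reflective access initializes the accessor table
}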


@ -12,23 +12,42 @@ public final class MultiRowMutation {
extends com.google.protobuf.MessageOrBuilder {
// repeated .MutationProto mutation_request = 1;
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto>
getMutationRequestList();
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index);
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
int getMutationRequestCount();
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationRequestOrBuilderList();
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder(
int index);
}
/**
* Protobuf type {@code MultiMutateRequest}
*/
public static final class MultiMutateRequest extends
com.google.protobuf.GeneratedMessage
implements MultiMutateRequestOrBuilder {
// Use MultiMutateRequest.newBuilder() to construct.
private MultiMutateRequest(Builder builder) {
private MultiMutateRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiMutateRequest(boolean noInit) {}
private MultiMutateRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MultiMutateRequest defaultInstance;
public static MultiMutateRequest getDefaultInstance() {
@ -39,6 +58,58 @@ public final class MultiRowMutation {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiMutateRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
mutationRequest_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto>();
mutable_bitField0_ |= 0x00000001;
}
mutationRequest_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
mutationRequest_ = java.util.Collections.unmodifiableList(mutationRequest_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor;
@ -46,25 +117,57 @@ public final class MultiRowMutation {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class);
}
public static com.google.protobuf.Parser<MultiMutateRequest> PARSER =
new com.google.protobuf.AbstractParser<MultiMutateRequest>() {
public MultiMutateRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiMutateRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiMutateRequest> getParserForType() {
return PARSER;
}
// repeated .MutationProto mutation_request = 1;
public static final int MUTATION_REQUEST_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> mutationRequest_;
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> getMutationRequestList() {
return mutationRequest_;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationRequestOrBuilderList() {
return mutationRequest_;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public int getMutationRequestCount() {
return mutationRequest_.size();
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) {
return mutationRequest_.get(index);
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder(
int index) {
return mutationRequest_.get(index);
@ -137,8 +240,12 @@ public final class MultiRowMutation {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getMutationRequestCount() > 0) {
@ -146,74 +253,61 @@ public final class MultiRowMutation {
hash = (53 * hash) + getMutationRequestList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -229,6 +323,9 @@ public final class MultiRowMutation {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code MultiMutateRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequestOrBuilder {
@ -239,7 +336,9 @@ public final class MultiRowMutation {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.newBuilder()
@ -247,7 +346,8 @@ public final class MultiRowMutation {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -277,7 +377,7 @@ public final class MultiRowMutation {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest getDefaultInstanceForType() {
@ -292,16 +392,6 @@ public final class MultiRowMutation {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest(this);
int from_bitField0_ = bitField0_;
@ -373,35 +463,19 @@ public final class MultiRowMutation {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addMutationRequest(subBuilder.buildPartial());
break;
}
}
}
}
private int bitField0_;
// repeated .MutationProto mutation_request = 1;
@ -417,6 +491,9 @@ public final class MultiRowMutation {
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder> mutationRequestBuilder_;
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> getMutationRequestList() {
if (mutationRequestBuilder_ == null) {
return java.util.Collections.unmodifiableList(mutationRequest_);
@ -424,6 +501,9 @@ public final class MultiRowMutation {
return mutationRequestBuilder_.getMessageList();
}
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public int getMutationRequestCount() {
if (mutationRequestBuilder_ == null) {
return mutationRequest_.size();
@ -431,6 +511,9 @@ public final class MultiRowMutation {
return mutationRequestBuilder_.getCount();
}
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto getMutationRequest(int index) {
if (mutationRequestBuilder_ == null) {
return mutationRequest_.get(index);
@ -438,6 +521,9 @@ public final class MultiRowMutation {
return mutationRequestBuilder_.getMessage(index);
}
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public Builder setMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationRequestBuilder_ == null) {
@ -452,6 +538,9 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public Builder setMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
if (mutationRequestBuilder_ == null) {
@ -463,6 +552,9 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public Builder addMutationRequest(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationRequestBuilder_ == null) {
if (value == null) {
@ -476,6 +568,9 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public Builder addMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto value) {
if (mutationRequestBuilder_ == null) {
@ -490,6 +585,9 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public Builder addMutationRequest(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
if (mutationRequestBuilder_ == null) {
@ -501,6 +599,9 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public Builder addMutationRequest(
int index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder builderForValue) {
if (mutationRequestBuilder_ == null) {
@ -512,6 +613,9 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public Builder addAllMutationRequest(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto> values) {
if (mutationRequestBuilder_ == null) {
@ -523,6 +627,9 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public Builder clearMutationRequest() {
if (mutationRequestBuilder_ == null) {
mutationRequest_ = java.util.Collections.emptyList();
@ -533,6 +640,9 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public Builder removeMutationRequest(int index) {
if (mutationRequestBuilder_ == null) {
ensureMutationRequestIsMutable();
@ -543,10 +653,16 @@ public final class MultiRowMutation {
}
return this;
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder getMutationRequestBuilder(
int index) {
return getMutationRequestFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder getMutationRequestOrBuilder(
int index) {
if (mutationRequestBuilder_ == null) {
@ -554,6 +670,9 @@ public final class MultiRowMutation {
return mutationRequestBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProtoOrBuilder>
getMutationRequestOrBuilderList() {
if (mutationRequestBuilder_ != null) {
@ -562,15 +681,24 @@ public final class MultiRowMutation {
return java.util.Collections.unmodifiableList(mutationRequest_);
}
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder() {
return getMutationRequestFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance());
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder addMutationRequestBuilder(
int index) {
return getMutationRequestFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.getDefaultInstance());
}
/**
* <code>repeated .MutationProto mutation_request = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.Builder>
getMutationRequestBuilderList() {
return getMutationRequestFieldBuilder().getBuilderList();
@ -604,14 +732,18 @@ public final class MultiRowMutation {
public interface MultiMutateResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MultiMutateResponse}
*/
public static final class MultiMutateResponse extends
com.google.protobuf.GeneratedMessage
implements MultiMutateResponseOrBuilder {
// Use MultiMutateResponse.newBuilder() to construct.
private MultiMutateResponse(Builder builder) {
private MultiMutateResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiMutateResponse(boolean noInit) {}
private MultiMutateResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MultiMutateResponse defaultInstance;
public static MultiMutateResponse getDefaultInstance() {
@ -622,6 +754,46 @@ public final class MultiRowMutation {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiMutateResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor;
@ -629,7 +801,24 @@ public final class MultiRowMutation {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class);
}
public static com.google.protobuf.Parser<MultiMutateResponse> PARSER =
new com.google.protobuf.AbstractParser<MultiMutateResponse>() {
public MultiMutateResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiMutateResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiMutateResponse> getParserForType() {
return PARSER;
}
private void initFields() {
@ -683,79 +872,70 @@ public final class MultiRowMutation {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -771,6 +951,9 @@ public final class MultiRowMutation {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code MultiMutateResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponseOrBuilder {
@ -781,7 +964,9 @@ public final class MultiRowMutation {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.newBuilder()
@ -789,7 +974,8 @@ public final class MultiRowMutation {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -812,7 +998,7 @@ public final class MultiRowMutation {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.internal_static_MultiMutateResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse getDefaultInstanceForType() {
@ -827,16 +1013,6 @@ public final class MultiRowMutation {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse(this);
onBuilt();
@ -866,29 +1042,19 @@ public final class MultiRowMutation {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
}
}
}
// @@protoc_insertion_point(builder_scope:MultiMutateResponse)
}
@ -901,11 +1067,17 @@ public final class MultiRowMutation {
// @@protoc_insertion_point(class_scope:MultiMutateResponse)
}
/**
* Protobuf service {@code MultiRowMutationService}
*/
public static abstract class MultiRowMutationService
implements com.google.protobuf.Service {
protected MultiRowMutationService() {}
public interface Interface {
/**
* <code>rpc MutateRows(.MultiMutateRequest) returns (.MultiMutateResponse);</code>
*/
public abstract void mutateRows(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request,
@ -988,6 +1160,9 @@ public final class MultiRowMutation {
};
}
/**
* <code>rpc MutateRows(.MultiMutateRequest) returns (.MultiMutateResponse);</code>
*/
public abstract void mutateRows(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest request,
@ -1120,6 +1295,8 @@ public final class MultiRowMutation {
}
}
// @@protoc_insertion_point(class_scope:MultiRowMutationService)
}
private static com.google.protobuf.Descriptors.Descriptor
@ -1160,17 +1337,13 @@ public final class MultiRowMutation {
internal_static_MultiMutateRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiMutateRequest_descriptor,
new java.lang.String[] { "MutationRequest", },
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.class,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest.Builder.class);
new java.lang.String[] { "MutationRequest", });
internal_static_MultiMutateResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_MultiMutateResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiMutateResponse_descriptor,
new java.lang.String[] { },
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.class,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateResponse.Builder.class);
new java.lang.String[] { });
return null;
}
};
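
Caller-visible effect of the hunks above: the stream overloads also route through PARSER, and parseDelimitedFrom now signals end-of-stream by returning null rather than through the old mergeDelimitedFrom boolean. A small usage sketch against the MultiMutateRequest type above (the round-trip is purely illustrative):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest;

public class DelimitedParseSketch {
  public static void main(String[] args) throws IOException {
    MultiMutateRequest req = MultiMutateRequest.newBuilder().build();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    req.writeDelimitedTo(out);  // first length-prefixed frame
    req.writeDelimitedTo(out);  // second frame on the same stream
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    MultiMutateRequest m;
    // protobuf 2.5: parseDelimitedFrom returns null once the stream is drained.
    while ((m = MultiMutateRequest.PARSER.parseDelimitedFrom(in)) != null) {
      System.out.println("frame with " + m.getMutationRequestCount() + " mutations");
    }
  }
}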

@ -11,14 +11,18 @@ public final class MultiRowMutationProcessorProtos {
public interface MultiRowMutationProcessorRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MultiRowMutationProcessorRequest}
*/
public static final class MultiRowMutationProcessorRequest extends
com.google.protobuf.GeneratedMessage
implements MultiRowMutationProcessorRequestOrBuilder {
// Use MultiRowMutationProcessorRequest.newBuilder() to construct.
private MultiRowMutationProcessorRequest(Builder builder) {
private MultiRowMutationProcessorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiRowMutationProcessorRequest(boolean noInit) {}
private MultiRowMutationProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MultiRowMutationProcessorRequest defaultInstance;
public static MultiRowMutationProcessorRequest getDefaultInstance() {
@ -29,6 +33,46 @@ public final class MultiRowMutationProcessorProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiRowMutationProcessorRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
@ -36,7 +80,24 @@ public final class MultiRowMutationProcessorProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
}
public static com.google.protobuf.Parser<MultiRowMutationProcessorRequest> PARSER =
new com.google.protobuf.AbstractParser<MultiRowMutationProcessorRequest>() {
public MultiRowMutationProcessorRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiRowMutationProcessorRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiRowMutationProcessorRequest> getParserForType() {
return PARSER;
}
private void initFields() {
@ -90,79 +151,70 @@ public final class MultiRowMutationProcessorProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -178,6 +230,9 @@ public final class MultiRowMutationProcessorProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code MultiRowMutationProcessorRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequestOrBuilder {
@ -188,7 +243,9 @@ public final class MultiRowMutationProcessorProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.newBuilder()
@ -196,7 +253,8 @@ public final class MultiRowMutationProcessorProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -219,7 +277,7 @@ public final class MultiRowMutationProcessorProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest getDefaultInstanceForType() {
@ -234,16 +292,6 @@ public final class MultiRowMutationProcessorProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest(this);
onBuilt();
@ -273,29 +321,19 @@ public final class MultiRowMutationProcessorProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
}
}
}
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorRequest)
}
@ -311,14 +349,18 @@ public final class MultiRowMutationProcessorProtos {
public interface MultiRowMutationProcessorResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MultiRowMutationProcessorResponse}
*/
public static final class MultiRowMutationProcessorResponse extends
com.google.protobuf.GeneratedMessage
implements MultiRowMutationProcessorResponseOrBuilder {
// Use MultiRowMutationProcessorResponse.newBuilder() to construct.
private MultiRowMutationProcessorResponse(Builder builder) {
private MultiRowMutationProcessorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiRowMutationProcessorResponse(boolean noInit) {}
private MultiRowMutationProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MultiRowMutationProcessorResponse defaultInstance;
public static MultiRowMutationProcessorResponse getDefaultInstance() {
@ -329,6 +371,46 @@ public final class MultiRowMutationProcessorProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiRowMutationProcessorResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
@ -336,7 +418,24 @@ public final class MultiRowMutationProcessorProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
}
public static com.google.protobuf.Parser<MultiRowMutationProcessorResponse> PARSER =
new com.google.protobuf.AbstractParser<MultiRowMutationProcessorResponse>() {
public MultiRowMutationProcessorResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiRowMutationProcessorResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiRowMutationProcessorResponse> getParserForType() {
return PARSER;
}
private void initFields() {
@ -390,79 +489,70 @@ public final class MultiRowMutationProcessorProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -478,6 +568,9 @@ public final class MultiRowMutationProcessorProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code MultiRowMutationProcessorResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponseOrBuilder {
@ -488,7 +581,9 @@ public final class MultiRowMutationProcessorProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.newBuilder()
@ -496,7 +591,8 @@ public final class MultiRowMutationProcessorProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -519,7 +615,7 @@ public final class MultiRowMutationProcessorProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse getDefaultInstanceForType() {
@ -534,16 +630,6 @@ public final class MultiRowMutationProcessorProtos {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse(this);
onBuilt();
@ -573,29 +659,19 @@ public final class MultiRowMutationProcessorProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
}
}
}
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorResponse)
}
@ -643,17 +719,13 @@ public final class MultiRowMutationProcessorProtos {
internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiRowMutationProcessorRequest_descriptor,
new java.lang.String[] { },
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
new java.lang.String[] { });
internal_static_MultiRowMutationProcessorResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiRowMutationProcessorResponse_descriptor,
new java.lang.String[] { },
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
new java.lang.String[] { });
return null;
}
};
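
The rewritten Builder.mergeFrom bodies above depend on another 2.5 addition visible in the new message constructors: a failed parse attaches whatever was decoded before the error via setUnfinishedMessage, and callers can recover it with getUnfinishedMessage. A hedged sketch of that recovery path, reusing the MultiMutateRequest type from the earlier file (parseLeniently is an illustrative helper, not part of this commit):

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.MessageLite;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutation.MultiMutateRequest;

public class UnfinishedMessageSketch {
  static MultiMutateRequest parseLeniently(byte[] maybeTruncated) {
    try {
      return MultiMutateRequest.PARSER.parseFrom(maybeTruncated);
    } catch (InvalidProtocolBufferException e) {
      // protobuf 2.5 carries the partially parsed message on the exception;
      // it may be null if nothing was decoded before the failure.
      MessageLite partial = e.getUnfinishedMessage();
      return (partial instanceof MultiMutateRequest)
          ? (MultiMutateRequest) partial : null;
    }
  }
}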

@ -12,21 +12,45 @@ public final class Tracing {
extends com.google.protobuf.MessageOrBuilder {
// optional int64 trace_id = 1;
/**
* <code>optional int64 trace_id = 1;</code>
*/
boolean hasTraceId();
/**
* <code>optional int64 trace_id = 1;</code>
*/
long getTraceId();
// optional int64 parent_id = 2;
/**
* <code>optional int64 parent_id = 2;</code>
*/
boolean hasParentId();
/**
* <code>optional int64 parent_id = 2;</code>
*/
long getParentId();
}
/**
* Protobuf type {@code RPCTInfo}
*
* <pre>
*Used to pass through the information necessary to continue
*a trace after an RPC is made. All we need is the traceid
*(so we know the overarching trace this message is a part of), and
*the id of the current span when this message was sent, so we know
*what span caused the new span we will create when this message is received.
* </pre>
*/
public static final class RPCTInfo extends
com.google.protobuf.GeneratedMessage
implements RPCTInfoOrBuilder {
// Use RPCTInfo.newBuilder() to construct.
private RPCTInfo(Builder builder) {
private RPCTInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private RPCTInfo(boolean noInit) {}
private RPCTInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final RPCTInfo defaultInstance;
public static RPCTInfo getDefaultInstance() {
@@ -37,6 +61,57 @@ public final class Tracing {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
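// Note (protobuf 2.5): unknownFields is now a final field on the message,
// filled in directly by the parsing constructor below instead of being copied
// out of a Builder, so unrecognized tags still survive reserialization.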
private RPCTInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
traceId_ = input.readInt64();
break;
}
case 16: {
bitField0_ |= 0x00000002;
parentId_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
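// Note (protobuf 2.5): wire parsing moved out of Builder.mergeFrom and into
// this private constructor. The case 0/default arms appear before the field
// cases only because protoc emits them first; switch arms match on the tag
// value, not on position. makeExtensionsImmutable() sits in the finally block
// so that even a partially parsed instance handed out through
// setUnfinishedMessage(this) is safe to read.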
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
@@ -44,16 +119,39 @@ public final class Tracing {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
}
public static com.google.protobuf.Parser<RPCTInfo> PARSER =
new com.google.protobuf.AbstractParser<RPCTInfo>() {
public RPCTInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RPCTInfo(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RPCTInfo> getParserForType() {
return PARSER;
}
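// Note: the static PARSER replaces the old newBuilder().mergeFrom(...).buildParsed()
// round trip. The parseFrom/parseDelimitedFrom overloads below are now thin
// wrappers over it, and getParserForType() lets generic framework code parse a
// message without knowing its concrete class.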
private int bitField0_;
// optional int64 trace_id = 1;
public static final int TRACE_ID_FIELD_NUMBER = 1;
private long traceId_;
/**
* <code>optional int64 trace_id = 1;</code>
*/
public boolean hasTraceId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 trace_id = 1;</code>
*/
public long getTraceId() {
return traceId_;
}
@@ -61,9 +159,15 @@ public final class Tracing {
// optional int64 parent_id = 2;
public static final int PARENT_ID_FIELD_NUMBER = 2;
private long parentId_;
/**
* <code>optional int64 parent_id = 2;</code>
*/
public boolean hasParentId() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 parent_id = 2;</code>
*/
public long getParentId() {
return parentId_;
}
@@ -145,8 +249,12 @@ public final class Tracing {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasTraceId()) {
@@ -158,74 +266,61 @@ public final class Tracing {
hash = (53 * hash) + hashLong(getParentId());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
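// Note: hashCode memoization is new in the 2.5 output. The unsynchronized int
// is a benign race (every thread computes the same value), and 0 doubles as
// the not-yet-computed sentinel, so a message whose hash happens to be 0 is
// simply recomputed on each call.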
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@@ -241,6 +336,17 @@ public final class Tracing {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code RPCTInfo}
*
* <pre>
*Used to pass through the information necessary to continue
*a trace after an RPC is made. All we need is the traceid
*(so we know the overarching trace this message is a part of), and
*the id of the current span when this message was sent, so we know
*what span caused the new span we will create when this message is received.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder {
@@ -251,7 +357,9 @@ public final class Tracing {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder()
@@ -259,7 +367,8 @@ public final class Tracing {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -286,7 +395,7 @@ public final class Tracing {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getDefaultInstanceForType() {
@@ -301,16 +410,6 @@ public final class Tracing {
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = new org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo(this);
int from_bitField0_ = bitField0_;
@@ -357,55 +456,47 @@ public final class Tracing {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
traceId_ = input.readInt64();
break;
}
case 16: {
bitField0_ |= 0x00000002;
parentId_ = input.readInt64();
break;
}
}
}
}
private int bitField0_;
// optional int64 trace_id = 1;
private long traceId_ ;
/**
* <code>optional int64 trace_id = 1;</code>
*/
public boolean hasTraceId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 trace_id = 1;</code>
*/
public long getTraceId() {
return traceId_;
}
/**
* <code>optional int64 trace_id = 1;</code>
*/
public Builder setTraceId(long value) {
bitField0_ |= 0x00000001;
traceId_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 trace_id = 1;</code>
*/
public Builder clearTraceId() {
bitField0_ = (bitField0_ & ~0x00000001);
traceId_ = 0L;
@@ -415,18 +506,30 @@ public final class Tracing {
// optional int64 parent_id = 2;
private long parentId_ ;
/**
* <code>optional int64 parent_id = 2;</code>
*/
public boolean hasParentId() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 parent_id = 2;</code>
*/
public long getParentId() {
return parentId_;
}
/**
* <code>optional int64 parent_id = 2;</code>
*/
public Builder setParentId(long value) {
bitField0_ |= 0x00000002;
parentId_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 parent_id = 2;</code>
*/
public Builder clearParentId() {
bitField0_ = (bitField0_ & ~0x00000002);
parentId_ = 0L;
@@ -474,9 +577,7 @@ public final class Tracing {
internal_static_RPCTInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_RPCTInfo_descriptor,
new java.lang.String[] { "TraceId", "ParentId", },
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class,
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
new java.lang.String[] { "TraceId", "ParentId", });
return null;
}
};
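Taken together, the RPCTInfo hunks above are the whole 2.4-to-2.5 migration story for a simple two-field message: a parsing constructor, a static PARSER, lazily initialized field accessors, and memoized hashCode. A minimal round trip using only the regenerated class:

import org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo;

public class RPCTInfoRoundTrip {
  public static void main(String[] args) throws Exception {
    byte[] wire = RPCTInfo.newBuilder()
        .setTraceId(77L)
        .setParentId(42L)
        .build()
        .toByteArray();
    // PARSER.parseFrom replaces the removed newBuilder().mergeFrom(wire).buildParsed().
    RPCTInfo info = RPCTInfo.PARSER.parseFrom(wire);
    assert info.hasTraceId() && info.getTraceId() == 77L;
    assert info.hasParentId() && info.getParentId() == 42L;
  }
}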

View File

@@ -404,19 +404,17 @@ service MasterAdminService {
/**
* Create a snapshot for the given table.
* @param snapshot description of the snapshot to take
*/
rpc Snapshot(TakeSnapshotRequest) returns(TakeSnapshotResponse);
/**
* List completed snapshots.
* Returns a list of snapshot descriptors for completed snapshots
* @return a list of snapshot descriptors for completed snapshots
*/
rpc GetCompletedSnapshots(ListSnapshotRequest) returns(ListSnapshotResponse);
/**
* Delete an existing snapshot. This method can also be used to clean up an aborted snapshot.
* @param snapshotName snapshot to delete
*/
rpc DeleteSnapshot(DeleteSnapshotRequest) returns(DeleteSnapshotResponse);
@@ -427,7 +425,6 @@ service MasterAdminService {
/**
* Restore a snapshot
* @param snapshot description of the snapshot to restore
*/
rpc RestoreSnapshot(RestoreSnapshotRequest) returns(RestoreSnapshotResponse);

View File

@@ -12,29 +12,65 @@ public final class CellMessage {
extends com.google.protobuf.MessageOrBuilder {
// optional bytes row = 1;
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
boolean hasRow();
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
com.google.protobuf.ByteString getRow();
// optional bytes column = 2;
/**
* <code>optional bytes column = 2;</code>
*/
boolean hasColumn();
/**
* <code>optional bytes column = 2;</code>
*/
com.google.protobuf.ByteString getColumn();
// optional int64 timestamp = 3;
/**
* <code>optional int64 timestamp = 3;</code>
*/
boolean hasTimestamp();
/**
* <code>optional int64 timestamp = 3;</code>
*/
long getTimestamp();
// optional bytes data = 4;
/**
* <code>optional bytes data = 4;</code>
*/
boolean hasData();
/**
* <code>optional bytes data = 4;</code>
*/
com.google.protobuf.ByteString getData();
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Cell}
*/
public static final class Cell extends
com.google.protobuf.GeneratedMessage
implements CellOrBuilder {
// Use Cell.newBuilder() to construct.
private Cell(Builder builder) {
private Cell(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private Cell(boolean noInit) {}
private Cell(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Cell defaultInstance;
public static Cell getDefaultInstance() {
@@ -45,6 +81,67 @@ public final class CellMessage {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Cell(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
row_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
column_ = input.readBytes();
break;
}
case 24: {
bitField0_ |= 0x00000004;
timestamp_ = input.readInt64();
break;
}
case 34: {
bitField0_ |= 0x00000008;
data_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
@@ -52,16 +149,47 @@ public final class CellMessage {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable;
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
}
public static com.google.protobuf.Parser<Cell> PARSER =
new com.google.protobuf.AbstractParser<Cell>() {
public Cell parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Cell(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Cell> getParserForType() {
return PARSER;
}
private int bitField0_;
// optional bytes row = 1;
public static final int ROW_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString row_;
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public com.google.protobuf.ByteString getRow() {
return row_;
}
@@ -69,9 +197,15 @@ public final class CellMessage {
// optional bytes column = 2;
public static final int COLUMN_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString column_;
/**
* <code>optional bytes column = 2;</code>
*/
public boolean hasColumn() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes column = 2;</code>
*/
public com.google.protobuf.ByteString getColumn() {
return column_;
}
@@ -79,9 +213,15 @@ public final class CellMessage {
// optional int64 timestamp = 3;
public static final int TIMESTAMP_FIELD_NUMBER = 3;
private long timestamp_;
/**
* <code>optional int64 timestamp = 3;</code>
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional int64 timestamp = 3;</code>
*/
public long getTimestamp() {
return timestamp_;
}
@@ -89,9 +229,15 @@ public final class CellMessage {
// optional bytes data = 4;
public static final int DATA_FIELD_NUMBER = 4;
private com.google.protobuf.ByteString data_;
/**
* <code>optional bytes data = 4;</code>
*/
public boolean hasData() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional bytes data = 4;</code>
*/
public com.google.protobuf.ByteString getData() {
return data_;
}
@@ -166,68 +312,54 @@ public final class CellMessage {
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@@ -243,6 +375,9 @@ public final class CellMessage {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.Cell}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.CellOrBuilder {
@@ -253,7 +388,9 @@ public final class CellMessage {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable;
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class, org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
}
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.newBuilder()
@@ -261,7 +398,8 @@ public final class CellMessage {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -292,7 +430,7 @@ public final class CellMessage {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.getDescriptor();
return org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor;
}
public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell getDefaultInstanceForType() {
@@ -307,16 +445,6 @@ public final class CellMessage {
return result;
}
private org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell buildPartial() {
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell result = new org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell(this);
int from_bitField0_ = bitField0_;
@@ -377,59 +505,50 @@ public final class CellMessage {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
row_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
column_ = input.readBytes();
break;
}
case 24: {
bitField0_ |= 0x00000004;
timestamp_ = input.readInt64();
break;
}
case 34: {
bitField0_ |= 0x00000008;
data_ = input.readBytes();
break;
}
}
}
}
private int bitField0_;
// optional bytes row = 1;
private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public boolean hasRow() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public com.google.protobuf.ByteString getRow() {
return row_;
}
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public Builder setRow(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@@ -439,6 +558,13 @@ public final class CellMessage {
onChanged();
return this;
}
/**
* <code>optional bytes row = 1;</code>
*
* <pre>
* unused if Cell is in a CellSet
* </pre>
*/
public Builder clearRow() {
bitField0_ = (bitField0_ & ~0x00000001);
row_ = getDefaultInstance().getRow();
@@ -448,12 +574,21 @@ public final class CellMessage {
// optional bytes column = 2;
private com.google.protobuf.ByteString column_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes column = 2;</code>
*/
public boolean hasColumn() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes column = 2;</code>
*/
public com.google.protobuf.ByteString getColumn() {
return column_;
}
/**
* <code>optional bytes column = 2;</code>
*/
public Builder setColumn(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@@ -463,6 +598,9 @@ public final class CellMessage {
onChanged();
return this;
}
/**
* <code>optional bytes column = 2;</code>
*/
public Builder clearColumn() {
bitField0_ = (bitField0_ & ~0x00000002);
column_ = getDefaultInstance().getColumn();
@@ -472,18 +610,30 @@ public final class CellMessage {
// optional int64 timestamp = 3;
private long timestamp_ ;
/**
* <code>optional int64 timestamp = 3;</code>
*/
public boolean hasTimestamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional int64 timestamp = 3;</code>
*/
public long getTimestamp() {
return timestamp_;
}
/**
* <code>optional int64 timestamp = 3;</code>
*/
public Builder setTimestamp(long value) {
bitField0_ |= 0x00000004;
timestamp_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 timestamp = 3;</code>
*/
public Builder clearTimestamp() {
bitField0_ = (bitField0_ & ~0x00000004);
timestamp_ = 0L;
@@ -493,12 +643,21 @@ public final class CellMessage {
// optional bytes data = 4;
private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes data = 4;</code>
*/
public boolean hasData() {
return ((bitField0_ & 0x00000008) == 0x00000008);
}
/**
* <code>optional bytes data = 4;</code>
*/
public com.google.protobuf.ByteString getData() {
return data_;
}
/**
* <code>optional bytes data = 4;</code>
*/
public Builder setData(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@ -508,6 +667,9 @@ public final class CellMessage {
onChanged();
return this;
}
/**
* <code>optional bytes data = 4;</code>
*/
public Builder clearData() {
bitField0_ = (bitField0_ & ~0x00000008);
data_ = getDefaultInstance().getData();
@@ -555,9 +717,7 @@ public final class CellMessage {
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_Cell_descriptor,
new java.lang.String[] { "Row", "Column", "Timestamp", "Data", },
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.class,
org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell.Builder.class);
new java.lang.String[] { "Row", "Column", "Timestamp", "Data", });
return null;
}
};
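The REST gateway's Cell message gets the identical mechanical rewrite. One concrete use of the regenerated API, relying only on the class above:

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;

public class CellRoundTrip {
  public static void main(String[] args) throws Exception {
    Cell cell = Cell.newBuilder()
        .setRow(ByteString.copyFromUtf8("row1"))   // unused inside a CellSet
        .setColumn(ByteString.copyFromUtf8("cf:q"))
        .setTimestamp(1L)
        .setData(ByteString.copyFromUtf8("value"))
        .build();
    Cell reparsed = Cell.PARSER.parseFrom(cell.toByteArray());
    // All four fields are optional; presence is reported by the has* accessors.
    assert reparsed.hasTimestamp() && reparsed.getTimestamp() == 1L;
  }
}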

View File

@@ -12,18 +12,37 @@ public final class TableListMessage {
extends com.google.protobuf.MessageOrBuilder {
// repeated string name = 1;
java.util.List<String> getNameList();
/**
* <code>repeated string name = 1;</code>
*/
java.util.List<java.lang.String>
getNameList();
/**
* <code>repeated string name = 1;</code>
*/
int getNameCount();
String getName(int index);
/**
* <code>repeated string name = 1;</code>
*/
java.lang.String getName(int index);
/**
* <code>repeated string name = 1;</code>
*/
com.google.protobuf.ByteString
getNameBytes(int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableList}
*/
public static final class TableList extends
com.google.protobuf.GeneratedMessage
implements TableListOrBuilder {
// Use TableList.newBuilder() to construct.
private TableList(Builder builder) {
private TableList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private TableList(boolean noInit) {}
private TableList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final TableList defaultInstance;
public static TableList getDefaultInstance() {
@@ -34,6 +53,58 @@ public final class TableListMessage {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TableList(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
name_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
name_.add(input.readBytes());
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
name_ = new com.google.protobuf.UnmodifiableLazyStringList(name_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
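// Note: for the repeated name field, the finally block above also freezes the
// mutable LazyStringArrayList into an UnmodifiableLazyStringList, so the list
// is wrapped exactly once whether parsing completed or threw.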
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
@@ -41,22 +112,55 @@ public final class TableListMessage {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
}
public static com.google.protobuf.Parser<TableList> PARSER =
new com.google.protobuf.AbstractParser<TableList>() {
public TableList parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TableList(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<TableList> getParserForType() {
return PARSER;
}
// repeated string name = 1;
public static final int NAME_FIELD_NUMBER = 1;
private com.google.protobuf.LazyStringList name_;
public java.util.List<String>
/**
* <code>repeated string name = 1;</code>
*/
public java.util.List<java.lang.String>
getNameList() {
return name_;
}
/**
* <code>repeated string name = 1;</code>
*/
public int getNameCount() {
return name_.size();
}
public String getName(int index) {
/**
* <code>repeated string name = 1;</code>
*/
public java.lang.String getName(int index) {
return name_.get(index);
}
/**
* <code>repeated string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes(int index) {
return name_.getByteString(index);
}
private void initFields() {
name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
@@ -109,68 +213,54 @@ public final class TableListMessage {
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@@ -186,6 +276,9 @@ public final class TableListMessage {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableList}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableListOrBuilder {
@@ -196,7 +289,9 @@ public final class TableListMessage {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
}
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.newBuilder()
@@ -204,7 +299,8 @@ public final class TableListMessage {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -229,7 +325,7 @@ public final class TableListMessage {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDescriptor();
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
}
public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList getDefaultInstanceForType() {
@@ -244,16 +340,6 @@ public final class TableListMessage {
return result;
}
private org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildPartial() {
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList(this);
int from_bitField0_ = bitField0_;
@@ -300,34 +386,19 @@ public final class TableListMessage {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 10: {
ensureNameIsMutable();
name_.add(input.readBytes());
break;
}
}
}
}
private int bitField0_;
// repeated string name = 1;
@@ -338,18 +409,37 @@ public final class TableListMessage {
bitField0_ |= 0x00000001;
}
}
public java.util.List<String>
/**
* <code>repeated string name = 1;</code>
*/
public java.util.List<java.lang.String>
getNameList() {
return java.util.Collections.unmodifiableList(name_);
}
/**
* <code>repeated string name = 1;</code>
*/
public int getNameCount() {
return name_.size();
}
public String getName(int index) {
/**
* <code>repeated string name = 1;</code>
*/
public java.lang.String getName(int index) {
return name_.get(index);
}
/**
* <code>repeated string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes(int index) {
return name_.getByteString(index);
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder setName(
int index, String value) {
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
@@ -358,7 +448,11 @@ public final class TableListMessage {
onChanged();
return this;
}
public Builder addName(String value) {
/**
* <code>repeated string name = 1;</code>
*/
public Builder addName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
@@ -367,23 +461,37 @@ public final class TableListMessage {
onChanged();
return this;
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder addAllName(
java.lang.Iterable<String> values) {
java.lang.Iterable<java.lang.String> values) {
ensureNameIsMutable();
super.addAll(values, name_);
onChanged();
return this;
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder clearName() {
name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
void addName(com.google.protobuf.ByteString value) {
/**
* <code>repeated string name = 1;</code>
*/
public Builder addNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureNameIsMutable();
name_.add(value);
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableList)
@@ -425,9 +533,7 @@ public final class TableListMessage {
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor,
new java.lang.String[] { "Name", },
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class,
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
new java.lang.String[] { "Name", });
return null;
}
};
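One behavioral detail preserved across the regeneration: parseDelimitedFrom still returns null at a clean end of stream, so the usual read-until-null loop over length-delimited messages keeps working. A self-contained sketch:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList;

public class TableListDelimited {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    TableList.newBuilder().addName("t1").build().writeDelimitedTo(out);
    TableList.newBuilder().addName("t2").addName("t3").build().writeDelimitedTo(out);

    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    TableList list;
    while ((list = TableList.parseDelimitedFrom(in)) != null) { // null at EOF
      System.out.println(list.getNameList());
    }
  }
}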

View File

@@ -1,7 +1,4 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -120,6 +117,8 @@ import org.apache.zookeeper.ZooKeeper.States;
* Depends on log4j being on classpath and
* hbase-site.xml for logging and test-run configuration. It does not set
* logging levels nor make changes to configuration parameters.
* <p>To preserve test data directories, set the system property
* "hbase.testing.preserve.testdir" to true.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@@ -284,7 +283,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
private void createSubDir(String propertyName, Path parent, String subDirName){
Path newPath= new Path(parent, subDirName);
File newDir = new File(newPath.toString()).getAbsoluteFile();
newDir.deleteOnExit();
if (deleteOnExit()) newDir.deleteOnExit();
conf.set(propertyName, newDir.getAbsolutePath());
}
@@ -350,9 +349,10 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
Path testDir = getDataTestDir("dfscluster_" + UUID.randomUUID().toString());
clusterTestDir = new File(testDir.toString()).getAbsoluteFile();
// Have it cleaned up on exit
clusterTestDir.deleteOnExit();
boolean b = deleteOnExit();
if (b) clusterTestDir.deleteOnExit();
conf.set(TEST_DIRECTORY_KEY, clusterTestDir.getPath());
LOG.info("Created new mini-cluster data directory: " + clusterTestDir);
LOG.info("Created new mini-cluster data directory: " + clusterTestDir + ", deleteOnExit=" + b);
}
/**
@@ -397,13 +397,13 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
FileSystem fs = getTestFileSystem();
if (fs.getUri().getScheme().equals(FileSystem.getLocal(conf).getUri().getScheme())) {
File dataTestDir = new File(getDataTestDir().toString());
dataTestDir.deleteOnExit();
if (deleteOnExit()) dataTestDir.deleteOnExit();
dataTestDirOnTestFS = new Path(dataTestDir.getAbsolutePath());
} else {
Path base = getBaseTestDirOnTestFS();
String randomStr = UUID.randomUUID().toString();
dataTestDirOnTestFS = new Path(base, randomStr);
fs.deleteOnExit(dataTestDirOnTestFS);
if (deleteOnExit()) fs.deleteOnExit(dataTestDirOnTestFS);
}
}
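The new deleteOnExit() guard is what connects the "hbase.testing.preserve.testdir" property from the class javadoc to the three cleanup sites above. Its body is not part of this hunk; a plausible sketch of the intended contract, with the implementation assumed rather than taken from the commit:

// Hypothetical sketch, not the committed code: keep the dirs when the
// preserve flag is set, delete them on JVM exit otherwise.
boolean deleteOnExit() {
  return !Boolean.parseBoolean(System.getProperty("hbase.testing.preserve.testdir"));
}
// Usage: mvn test -Dhbase.testing.preserve.testdir=true  leaves test dirs in place.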

View File

@@ -12,21 +12,37 @@ public final class ColumnAggregationProtos {
extends com.google.protobuf.MessageOrBuilder {
// required bytes family = 1;
/**
* <code>required bytes family = 1;</code>
*/
boolean hasFamily();
/**
* <code>required bytes family = 1;</code>
*/
com.google.protobuf.ByteString getFamily();
// optional bytes qualifier = 2;
/**
* <code>optional bytes qualifier = 2;</code>
*/
boolean hasQualifier();
/**
* <code>optional bytes qualifier = 2;</code>
*/
com.google.protobuf.ByteString getQualifier();
}
/**
* Protobuf type {@code SumRequest}
*/
public static final class SumRequest extends
com.google.protobuf.GeneratedMessage
implements SumRequestOrBuilder {
// Use SumRequest.newBuilder() to construct.
private SumRequest(Builder builder) {
private SumRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private SumRequest(boolean noInit) {}
private SumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SumRequest defaultInstance;
public static SumRequest getDefaultInstance() {
@@ -37,6 +53,57 @@ public final class ColumnAggregationProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SumRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
family_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
qualifier_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor;
@@ -44,16 +111,39 @@ public final class ColumnAggregationProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class);
}
public static com.google.protobuf.Parser<SumRequest> PARSER =
new com.google.protobuf.AbstractParser<SumRequest>() {
public SumRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SumRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SumRequest> getParserForType() {
return PARSER;
}
private int bitField0_;
// required bytes family = 1;
public static final int FAMILY_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString family_;
/**
* <code>required bytes family = 1;</code>
*/
public boolean hasFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes family = 1;</code>
*/
public com.google.protobuf.ByteString getFamily() {
return family_;
}
@@ -61,9 +151,15 @@ public final class ColumnAggregationProtos {
// optional bytes qualifier = 2;
public static final int QUALIFIER_FIELD_NUMBER = 2;
private com.google.protobuf.ByteString qualifier_;
/**
* <code>optional bytes qualifier = 2;</code>
*/
public boolean hasQualifier() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes qualifier = 2;</code>
*/
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
@@ -149,8 +245,12 @@ public final class ColumnAggregationProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasFamily()) {
@@ -162,74 +262,61 @@ public final class ColumnAggregationProtos {
hash = (53 * hash) + getQualifier().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@@ -245,6 +332,9 @@ public final class ColumnAggregationProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code SumRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequestOrBuilder {
@@ -255,7 +345,9 @@ public final class ColumnAggregationProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.newBuilder()
@ -263,7 +355,8 @@ public final class ColumnAggregationProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -290,7 +383,7 @@ public final class ColumnAggregationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.getDescriptor();
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumRequest_descriptor;
}
public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest getDefaultInstanceForType() {
@ -305,16 +398,6 @@ public final class ColumnAggregationProtos {
return result;
}
private org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest buildPartial() {
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest(this);
int from_bitField0_ = bitField0_;
@ -365,49 +448,38 @@ public final class ColumnAggregationProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
family_ = input.readBytes();
break;
}
case 18: {
bitField0_ |= 0x00000002;
qualifier_ = input.readBytes();
break;
}
}
}
}
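The Builder.mergeFrom(CodedInputStream, ExtensionRegistryLite) rewrite above replaces the hand-rolled tag switch with a delegation to PARSER.parsePartialFrom; on failure, the partially read message is recovered through InvalidProtocolBufferException.getUnfinishedMessage() (new in 2.5.0) and merged back in the finally block, so no consumed input is lost. A sketch of what that exposes to callers (assuming empty input, which fails the required-field check):

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest;

    public class UnfinishedMessageSketch {
      public static void main(String[] args) {
        try {
          // Fails: required field 'family' is absent from the empty input.
          SumRequest.PARSER.parseFrom(new byte[0]);
        } catch (InvalidProtocolBufferException e) {
          // 2.5.0 attaches whatever was parsed before the failure.
          SumRequest partial = (SumRequest) e.getUnfinishedMessage();
          System.out.println("hasFamily=" + partial.hasFamily()); // hasFamily=false
        }
      }
    }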
private int bitField0_;
// required bytes family = 1;
private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>required bytes family = 1;</code>
*/
public boolean hasFamily() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bytes family = 1;</code>
*/
public com.google.protobuf.ByteString getFamily() {
return family_;
}
/**
* <code>required bytes family = 1;</code>
*/
public Builder setFamily(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@ -417,6 +489,9 @@ public final class ColumnAggregationProtos {
onChanged();
return this;
}
/**
* <code>required bytes family = 1;</code>
*/
public Builder clearFamily() {
bitField0_ = (bitField0_ & ~0x00000001);
family_ = getDefaultInstance().getFamily();
@ -426,12 +501,21 @@ public final class ColumnAggregationProtos {
// optional bytes qualifier = 2;
private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
/**
* <code>optional bytes qualifier = 2;</code>
*/
public boolean hasQualifier() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional bytes qualifier = 2;</code>
*/
public com.google.protobuf.ByteString getQualifier() {
return qualifier_;
}
/**
* <code>optional bytes qualifier = 2;</code>
*/
public Builder setQualifier(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
@ -441,6 +525,9 @@ public final class ColumnAggregationProtos {
onChanged();
return this;
}
/**
* <code>optional bytes qualifier = 2;</code>
*/
public Builder clearQualifier() {
bitField0_ = (bitField0_ & ~0x00000002);
qualifier_ = getDefaultInstance().getQualifier();
@ -463,17 +550,27 @@ public final class ColumnAggregationProtos {
extends com.google.protobuf.MessageOrBuilder {
// required int64 sum = 1;
/**
* <code>required int64 sum = 1;</code>
*/
boolean hasSum();
/**
* <code>required int64 sum = 1;</code>
*/
long getSum();
}
/**
* Protobuf type {@code SumResponse}
*/
public static final class SumResponse extends
com.google.protobuf.GeneratedMessage
implements SumResponseOrBuilder {
// Use SumResponse.newBuilder() to construct.
private SumResponse(Builder builder) {
private SumResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private SumResponse(boolean noInit) {}
private SumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final SumResponse defaultInstance;
public static SumResponse getDefaultInstance() {
@ -484,6 +581,52 @@ public final class ColumnAggregationProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private SumResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
sum_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
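Also new in 2.5.0: parsing moved out of the builder and into this private constructor, which accumulates unrecognized tags into an immutable UnknownFieldSet pinned on the instance. Unknown fields still round-trip, which a sketch can confirm (field number 99 is an arbitrary unknown field):

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.UnknownFieldSet;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse;

    public class UnknownFieldsSketch {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        UnknownFieldSet extra = UnknownFieldSet.newBuilder()
            .addField(99, UnknownFieldSet.Field.newBuilder().addVarint(7L).build())
            .build();
        byte[] wire = SumResponse.newBuilder()
            .setSum(42L)
            .setUnknownFields(extra)
            .build()
            .toByteArray();
        // The 2.5.0 parsing constructor preserves the unrecognized field verbatim.
        SumResponse reparsed = SumResponse.PARSER.parseFrom(wire);
        System.out.println(reparsed.getUnknownFields().hasField(99)); // true
      }
    }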
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor;
@ -491,16 +634,39 @@ public final class ColumnAggregationProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class);
}
public static com.google.protobuf.Parser<SumResponse> PARSER =
new com.google.protobuf.AbstractParser<SumResponse>() {
public SumResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new SumResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<SumResponse> getParserForType() {
return PARSER;
}
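The PARSER instance is also reachable generically through getParserForType(), added to the message interface in 2.5.0, so framework code can reparse any message from a prototype without per-type reflection. A sketch (reparse is a hypothetical helper, not part of this patch):

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.Message;

    public class GenericParseSketch {
      @SuppressWarnings("unchecked")
      static <M extends Message> M reparse(M prototype, byte[] wire)
          throws InvalidProtocolBufferException {
        // The prototype carries its own parser; no Method.invoke needed.
        return (M) prototype.getParserForType().parseFrom(wire);
      }
    }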
private int bitField0_;
// required int64 sum = 1;
public static final int SUM_FIELD_NUMBER = 1;
private long sum_;
/**
* <code>required int64 sum = 1;</code>
*/
public boolean hasSum() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int64 sum = 1;</code>
*/
public long getSum() {
return sum_;
}
@ -573,8 +739,12 @@ public final class ColumnAggregationProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSum()) {
@ -582,74 +752,61 @@ public final class ColumnAggregationProtos {
hash = (53 * hash) + hashLong(getSum());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
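hashCode() now memoizes its result: generated messages are immutable, so the value is computed once and cached in memoizedHashCode. The idiom in isolation (a sketch, not the generated code; 0 doubles as the "unset" sentinel exactly as above):

    public final class CachedHash {
      private final long value;
      private int memoizedHashCode = 0; // 0 means "not yet computed"

      CachedHash(long value) { this.value = value; }

      @Override
      public int hashCode() {
        if (memoizedHashCode != 0) {
          return memoizedHashCode;
        }
        int hash = 41;
        hash = (53 * hash) + (int) (value ^ (value >>> 32));
        memoizedHashCode = hash; // benign race: every thread computes the same int
        return hash;
      }
    }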
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -665,6 +822,9 @@ public final class ColumnAggregationProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code SumResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponseOrBuilder {
@ -675,7 +835,9 @@ public final class ColumnAggregationProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.newBuilder()
@ -683,7 +845,8 @@ public final class ColumnAggregationProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -708,7 +871,7 @@ public final class ColumnAggregationProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.getDescriptor();
return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.internal_static_SumResponse_descriptor;
}
public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse getDefaultInstanceForType() {
@ -723,16 +886,6 @@ public final class ColumnAggregationProtos {
return result;
}
private org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse buildPartial() {
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse(this);
int from_bitField0_ = bitField0_;
@ -776,50 +929,47 @@ public final class ColumnAggregationProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
sum_ = input.readInt64();
break;
}
}
}
}
private int bitField0_;
// required int64 sum = 1;
private long sum_ ;
/**
* <code>required int64 sum = 1;</code>
*/
public boolean hasSum() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int64 sum = 1;</code>
*/
public long getSum() {
return sum_;
}
/**
* <code>required int64 sum = 1;</code>
*/
public Builder setSum(long value) {
bitField0_ |= 0x00000001;
sum_ = value;
onChanged();
return this;
}
/**
* <code>required int64 sum = 1;</code>
*/
public Builder clearSum() {
bitField0_ = (bitField0_ & ~0x00000001);
sum_ = 0L;
@ -838,11 +988,17 @@ public final class ColumnAggregationProtos {
// @@protoc_insertion_point(class_scope:SumResponse)
}
/**
* Protobuf service {@code ColumnAggregationService}
*/
public static abstract class ColumnAggregationService
implements com.google.protobuf.Service {
protected ColumnAggregationService() {}
public interface Interface {
/**
* <code>rpc sum(.SumRequest) returns (.SumResponse);</code>
*/
public abstract void sum(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request,
@ -925,6 +1081,9 @@ public final class ColumnAggregationProtos {
};
}
/**
* <code>rpc sum(.SumRequest) returns (.SumResponse);</code>
*/
public abstract void sum(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest request,
@ -1057,6 +1216,8 @@ public final class ColumnAggregationProtos {
}
}
// @@protoc_insertion_point(class_scope:ColumnAggregationService)
}
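For the service, 2.5.0 only adds javadoc; implementing it is unchanged. The generated abstract class supplies all of the com.google.protobuf.Service plumbing (callMethod, request/response prototypes), leaving only the rpc method abstract. A minimal, hypothetical implementation:

    import com.google.protobuf.RpcCallback;
    import com.google.protobuf.RpcController;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.ColumnAggregationService;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse;

    public class FixedSumService extends ColumnAggregationService {
      @Override
      public void sum(RpcController controller, SumRequest request,
          RpcCallback<SumResponse> done) {
        // A real coprocessor would sum the requested column; this stub returns 0.
        done.run(SumResponse.newBuilder().setSum(0L).build());
      }
    }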
private static com.google.protobuf.Descriptors.Descriptor
@ -1096,17 +1257,13 @@ public final class ColumnAggregationProtos {
internal_static_SumRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_SumRequest_descriptor,
new java.lang.String[] { "Family", "Qualifier", },
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.class,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumRequest.Builder.class);
new java.lang.String[] { "Family", "Qualifier", });
internal_static_SumResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_SumResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_SumResponse_descriptor,
new java.lang.String[] { "Sum", },
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.class,
org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse.Builder.class);
new java.lang.String[] { "Sum", });
return null;
}
};
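The descriptor hookup just above pairs with the internalGetFieldAccessorTable() hunks earlier in the file: 2.4.1 captured the message and builder classes eagerly when the FieldAccessorTable was constructed, while 2.5.0 builds the table from field names only and binds the classes lazily via ensureFieldAccessorsInitialized(...) on first reflective use. Reflective use looks like this (sketch):

    import com.google.protobuf.Descriptors.FieldDescriptor;
    import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos.SumResponse;

    public class ReflectionSketch {
      public static void main(String[] args) {
        SumResponse msg = SumResponse.newBuilder().setSum(7L).build();
        // getField goes through the accessor table, triggering the lazy init.
        FieldDescriptor sum = msg.getDescriptorForType().findFieldByName("sum");
        System.out.println(msg.getField(sum)); // 7
      }
    }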
@ -12,17 +12,27 @@ public final class TestDelayedRpcProtos {
extends com.google.protobuf.MessageOrBuilder {
// required bool delay = 1;
/**
* <code>required bool delay = 1;</code>
*/
boolean hasDelay();
/**
* <code>required bool delay = 1;</code>
*/
boolean getDelay();
}
/**
* Protobuf type {@code TestArg}
*/
public static final class TestArg extends
com.google.protobuf.GeneratedMessage
implements TestArgOrBuilder {
// Use TestArg.newBuilder() to construct.
private TestArg(Builder builder) {
private TestArg(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private TestArg(boolean noInit) {}
private TestArg(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final TestArg defaultInstance;
public static TestArg getDefaultInstance() {
@ -33,6 +43,52 @@ public final class TestDelayedRpcProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TestArg(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
delay_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_descriptor;
@ -40,16 +96,39 @@ public final class TestDelayedRpcProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable;
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.Builder.class);
}
public static com.google.protobuf.Parser<TestArg> PARSER =
new com.google.protobuf.AbstractParser<TestArg>() {
public TestArg parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TestArg(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<TestArg> getParserForType() {
return PARSER;
}
private int bitField0_;
// required bool delay = 1;
public static final int DELAY_FIELD_NUMBER = 1;
private boolean delay_;
/**
* <code>required bool delay = 1;</code>
*/
public boolean hasDelay() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bool delay = 1;</code>
*/
public boolean getDelay() {
return delay_;
}
@ -122,8 +201,12 @@ public final class TestDelayedRpcProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasDelay()) {
@ -131,74 +214,61 @@ public final class TestDelayedRpcProtos {
hash = (53 * hash) + hashBoolean(getDelay());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
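TestArg's parseDelimitedFrom keeps its old contract through the new parser: PARSER.parseDelimitedFrom returns null at a clean end of stream, just as the removed mergeDelimitedFrom branch did. Round-tripping a length-delimited stream (sketch):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg;

    public class DelimitedSketch {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        TestArg.newBuilder().setDelay(true).build().writeDelimitedTo(out);
        TestArg.newBuilder().setDelay(false).build().writeDelimitedTo(out);

        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
        TestArg msg;
        while ((msg = TestArg.parseDelimitedFrom(in)) != null) { // null at clean EOF
          System.out.println("delay=" + msg.getDelay());
        }
      }
    }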
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -214,6 +284,9 @@ public final class TestDelayedRpcProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code TestArg}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArgOrBuilder {
@ -224,7 +297,9 @@ public final class TestDelayedRpcProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable;
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.Builder.class);
}
// Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.newBuilder()
@ -232,7 +307,8 @@ public final class TestDelayedRpcProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -257,7 +333,7 @@ public final class TestDelayedRpcProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.getDescriptor();
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestArg_descriptor;
}
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg getDefaultInstanceForType() {
@ -272,16 +348,6 @@ public final class TestDelayedRpcProtos {
return result;
}
private org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg buildPartial() {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg(this);
int from_bitField0_ = bitField0_;
@ -325,50 +391,47 @@ public final class TestDelayedRpcProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
delay_ = input.readBool();
break;
}
}
}
}
private int bitField0_;
// required bool delay = 1;
private boolean delay_ ;
/**
* <code>required bool delay = 1;</code>
*/
public boolean hasDelay() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required bool delay = 1;</code>
*/
public boolean getDelay() {
return delay_;
}
/**
* <code>required bool delay = 1;</code>
*/
public Builder setDelay(boolean value) {
bitField0_ |= 0x00000001;
delay_ = value;
onChanged();
return this;
}
/**
* <code>required bool delay = 1;</code>
*/
public Builder clearDelay() {
bitField0_ = (bitField0_ & ~0x00000001);
delay_ = false;
@ -391,17 +454,27 @@ public final class TestDelayedRpcProtos {
extends com.google.protobuf.MessageOrBuilder {
// required int32 response = 1;
/**
* <code>required int32 response = 1;</code>
*/
boolean hasResponse();
/**
* <code>required int32 response = 1;</code>
*/
int getResponse();
}
/**
* Protobuf type {@code TestResponse}
*/
public static final class TestResponse extends
com.google.protobuf.GeneratedMessage
implements TestResponseOrBuilder {
// Use TestResponse.newBuilder() to construct.
private TestResponse(Builder builder) {
private TestResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private TestResponse(boolean noInit) {}
private TestResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final TestResponse defaultInstance;
public static TestResponse getDefaultInstance() {
@ -412,6 +485,52 @@ public final class TestDelayedRpcProtos {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private TestResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
response_ = input.readInt32();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_descriptor;
@ -419,16 +538,39 @@ public final class TestDelayedRpcProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.Builder.class);
}
public static com.google.protobuf.Parser<TestResponse> PARSER =
new com.google.protobuf.AbstractParser<TestResponse>() {
public TestResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TestResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<TestResponse> getParserForType() {
return PARSER;
}
private int bitField0_;
// required int32 response = 1;
public static final int RESPONSE_FIELD_NUMBER = 1;
private int response_;
/**
* <code>required int32 response = 1;</code>
*/
public boolean hasResponse() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int32 response = 1;</code>
*/
public int getResponse() {
return response_;
}
@ -501,8 +643,12 @@ public final class TestDelayedRpcProtos {
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasResponse()) {
@ -510,74 +656,61 @@ public final class TestDelayedRpcProtos {
hash = (53 * hash) + getResponse();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
@ -593,6 +726,9 @@ public final class TestDelayedRpcProtos {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code TestResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponseOrBuilder {
@ -603,7 +739,9 @@ public final class TestDelayedRpcProtos {
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.class, org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.newBuilder()
@ -611,7 +749,8 @@ public final class TestDelayedRpcProtos {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -636,7 +775,7 @@ public final class TestDelayedRpcProtos {
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.getDescriptor();
return org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.internal_static_TestResponse_descriptor;
}
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse getDefaultInstanceForType() {
@ -651,16 +790,6 @@ public final class TestDelayedRpcProtos {
return result;
}
private org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse buildPartial() {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse result = new org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse(this);
int from_bitField0_ = bitField0_;
@ -704,50 +833,47 @@ public final class TestDelayedRpcProtos {
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
response_ = input.readInt32();
break;
}
}
}
}
private int bitField0_;
// required int32 response = 1;
private int response_ ;
/**
* <code>required int32 response = 1;</code>
*/
public boolean hasResponse() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required int32 response = 1;</code>
*/
public int getResponse() {
return response_;
}
/**
* <code>required int32 response = 1;</code>
*/
public Builder setResponse(int value) {
bitField0_ |= 0x00000001;
response_ = value;
onChanged();
return this;
}
/**
* <code>required int32 response = 1;</code>
*/
public Builder clearResponse() {
bitField0_ = (bitField0_ & ~0x00000001);
response_ = 0;
@ -766,11 +892,17 @@ public final class TestDelayedRpcProtos {
// @@protoc_insertion_point(class_scope:TestResponse)
}
/**
* Protobuf service {@code TestDelayedService}
*/
public static abstract class TestDelayedService
implements com.google.protobuf.Service {
protected TestDelayedService() {}
public interface Interface {
/**
* <code>rpc test(.TestArg) returns (.TestResponse);</code>
*/
public abstract void test(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg request,
@ -853,6 +985,9 @@ public final class TestDelayedRpcProtos {
};
}
/**
* <code>rpc test(.TestArg) returns (.TestResponse);</code>
*/
public abstract void test(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg request,
@ -985,6 +1120,8 @@ public final class TestDelayedRpcProtos {
}
}
// @@protoc_insertion_point(class_scope:TestDelayedService)
}
private static com.google.protobuf.Descriptors.Descriptor
@ -1023,17 +1160,13 @@ public final class TestDelayedRpcProtos {
internal_static_TestArg_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_TestArg_descriptor,
new java.lang.String[] { "Delay", },
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.class,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestArg.Builder.class);
new java.lang.String[] { "Delay", });
internal_static_TestResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_TestResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_TestResponse_descriptor,
new java.lang.String[] { "Response", },
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.class,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestDelayedRpcProtos.TestResponse.Builder.class);
new java.lang.String[] { "Response", });
return null;
}
};
@ -8,21 +8,38 @@ public final class TestRpcServiceProtos {
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
* Protobuf service {@code TestProtobufRpcProto}
*
* <pre>
**
* A protobuf service for use in tests
* </pre>
*/
public static abstract class TestProtobufRpcProto
implements com.google.protobuf.Service {
protected TestProtobufRpcProto() {}
public interface Interface {
/**
* <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
/**
* <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);
/**
* <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
@ -133,16 +150,25 @@ public final class TestRpcServiceProtos {
};
}
/**
* <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
/**
* <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);
/**
* <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
@ -357,6 +383,8 @@ public final class TestRpcServiceProtos {
}
}
// @@protoc_insertion_point(class_scope:TestProtobufRpcProto)
}
@ -365,6 +365,10 @@
</developer>
</developers>
<repositories>
<repository>
<id>Arun Staging 2.1.0-beta RCs</id>
<url>https://repository.apache.org/content/repositories/orgapachehadoop-099/</url>
</repository>
<repository>
<id>cloudbees netty</id>
<url>http://repository-netty.forge.cloudbees.com/snapshot/</url>
@ -881,7 +885,7 @@
<buildDate>${maven.build.timestamp}</buildDate>
<compileSource>1.6</compileSource>
<!-- Dependencies -->
<hadoop-two.version>2.0.5-alpha</hadoop-two.version>
<hadoop-two.version>2.1.0-beta</hadoop-two.version>
<hadoop-one.version>1.2.1</hadoop-one.version>
<commons-cli.version>1.2</commons-cli.version>
<commons-codec.version>1.7</commons-codec.version>
@ -905,7 +909,7 @@
<htrace.version>2.00</htrace.version>
<log4j.version>1.2.17</log4j.version>
<mockito-all.version>1.9.0</mockito-all.version>
<protobuf.version>2.4.1</protobuf.version>
<protobuf.version>2.5.0</protobuf.version>
<stax-api.version>1.0.1</stax-api.version>
<thrift.version>0.9.0</thrift.version>
<zookeeper.version>3.4.5</zookeeper.version>
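The pom changes tie the two bumps together: Hadoop 2.1.0-beta (staged in the repository added above) is built against protobuf 2.5.0, so hadoop-two.version and protobuf.version must move in lock step with the regenerated classes in this commit. A quick classpath sanity check (a sketch; com.google.protobuf.Parser only exists from protobuf-java 2.5.0 on):

    public class ProtobufRuntimeCheck {
      public static void main(String[] args) {
        try {
          // The Parser interface was introduced in protobuf-java 2.5.0.
          Class.forName("com.google.protobuf.Parser");
          System.out.println("protobuf-java >= 2.5.0 on the classpath");
        } catch (ClassNotFoundException e) {
          System.out.println("protobuf-java 2.4.x (no Parser interface)");
        }
      }
    }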