HBASE-8165 Update our protobuf to 2.5 from 2.4.1; REVERT
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1466759 13f79535-47bb-0310-9956-ffa450edef68
parent 6d32cc0881
commit d1b3505b26
@@ -326,7 +326,7 @@ public class ServerName implements Comparable<ServerName> {
     int prefixLen = ProtobufUtil.lengthOfPBMagic();
     try {
       RootRegionServer rss =
-        RootRegionServer.PARSER.parseFrom(data, prefixLen, data.length - prefixLen);
+        RootRegionServer.newBuilder().mergeFrom(data, prefixLen, data.length - prefixLen).build();
       org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName sn = rss.getServer();
       return new ServerName(sn.getHostName(), sn.getPort(), sn.getStartCode());
     } catch (InvalidProtocolBufferException e) {
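The hunk above is the shape of the whole revert: protobuf 2.5 parses through a generated static PARSER, while 2.4.1 merges into a Builder and builds. A minimal sketch of the two idioms, using FileDescriptorProto (a generated message that ships inside protobuf-java) as a stand-in for HBase's generated classes:

import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import com.google.protobuf.InvalidProtocolBufferException;

public class ParseIdioms {
  // protobuf 2.5 idiom: every generated message exposes a static PARSER.
  static FileDescriptorProto with25Parser(byte[] data, int off, int len)
      throws InvalidProtocolBufferException {
    return FileDescriptorProto.PARSER.parseFrom(data, off, len);
  }

  // protobuf 2.4.1 idiom: no Parser API yet; merge into a Builder, then build.
  static FileDescriptorProto with241Builder(byte[] data, int off, int len)
      throws InvalidProtocolBufferException {
    return FileDescriptorProto.newBuilder().mergeFrom(data, off, len).build();
  }
}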
@@ -41,7 +41,6 @@ import com.google.common.collect.Lists;
 import com.google.protobuf.ByteString;
 import com.google.protobuf.InvalidProtocolBufferException;
 import com.google.protobuf.Message;
-import com.google.protobuf.Parser;
 import com.google.protobuf.RpcChannel;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
@@ -1835,19 +1834,17 @@ public final class ProtobufUtil {
   }
 
   public static ScanMetrics toScanMetrics(final byte[] bytes) {
-    Parser<MapReduceProtos.ScanMetrics> parser = MapReduceProtos.ScanMetrics.PARSER;
-    MapReduceProtos.ScanMetrics pScanMetrics = null;
+    MapReduceProtos.ScanMetrics.Builder builder = MapReduceProtos.ScanMetrics.newBuilder();
     try {
-      pScanMetrics = parser.parseFrom(bytes);
+      builder.mergeFrom(bytes);
     } catch (InvalidProtocolBufferException e) {
       //Ignored there are just no key values to add.
     }
+    MapReduceProtos.ScanMetrics pScanMetrics = builder.build();
     ScanMetrics scanMetrics = new ScanMetrics();
-    if (pScanMetrics != null) {
-      for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
-        if (pair.hasName() && pair.hasValue()) {
-          scanMetrics.setCounter(pair.getName(), pair.getValue());
-        }
-      }
+    for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
+      if (pair.hasName() && pair.hasValue()) {
+        scanMetrics.setCounter(pair.getName(), pair.getValue());
+      }
     }
     return scanMetrics;
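A side effect of the builder idiom restored here: mergeFrom() keeps whatever fields were merged before a parse error, so build() always yields a message and the 2.5-era null check becomes unnecessary. A sketch of the pattern, with FileDescriptorProto again standing in for ScanMetrics:

import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import com.google.protobuf.InvalidProtocolBufferException;

public class LenientParse {
  static FileDescriptorProto parseLeniently(byte[] bytes) {
    FileDescriptorProto.Builder builder = FileDescriptorProto.newBuilder();
    try {
      builder.mergeFrom(bytes);
    } catch (InvalidProtocolBufferException e) {
      // Ignored: keep whatever fields merged successfully before the error.
    }
    return builder.build(); // possibly partial, never null
  }
}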
@@ -819,34 +819,24 @@ public final class RequestConverter {
     return builder.build();
   }
 
-  /**
-   * @see {@link #buildRollWALWriterRequest()
-   */
-  private static RollWALWriterRequest ROLL_WAL_WRITER_REQUEST =
-      RollWALWriterRequest.newBuilder().build();
-
   /**
    * Create a new RollWALWriterRequest
    *
    * @return a ReplicateWALEntryRequest
    */
   public static RollWALWriterRequest buildRollWALWriterRequest() {
-    return ROLL_WAL_WRITER_REQUEST;
+    RollWALWriterRequest.Builder builder = RollWALWriterRequest.newBuilder();
+    return builder.build();
   }
 
-  /**
-   * @see {@link #buildGetServerInfoRequest()}
-   */
-  private static GetServerInfoRequest GET_SERVER_INFO_REQUEST =
-      GetServerInfoRequest.newBuilder().build();
-
   /**
    * Create a new GetServerInfoRequest
    *
    * @return a GetServerInfoRequest
    */
   public static GetServerInfoRequest buildGetServerInfoRequest() {
-    return GET_SERVER_INFO_REQUEST;
+    GetServerInfoRequest.Builder builder = GetServerInfoRequest.newBuilder();
+    return builder.build();
   }
 
   /**
@@ -1149,33 +1139,21 @@ public final class RequestConverter {
     return SetBalancerRunningRequest.newBuilder().setOn(on).setSynchronous(synchronous).build();
   }
 
-  /**
-   * @see {@link #buildGetClusterStatusRequest}
-   */
-  private static final GetClusterStatusRequest GET_CLUSTER_STATUS_REQUEST =
-      GetClusterStatusRequest.newBuilder().build();
-
   /**
    * Creates a protocol buffer GetClusterStatusRequest
    *
    * @return A GetClusterStatusRequest
    */
   public static GetClusterStatusRequest buildGetClusterStatusRequest() {
-    return GET_CLUSTER_STATUS_REQUEST;
+    return GetClusterStatusRequest.newBuilder().build();
   }
 
-  /**
-   * @see {@link #buildCatalogScanRequest}
-   */
-  private static final CatalogScanRequest CATALOG_SCAN_REQUEST =
-      CatalogScanRequest.newBuilder().build();
-
   /**
    * Creates a request for running a catalog scan
    * @return A {@link CatalogScanRequest}
    */
   public static CatalogScanRequest buildCatalogScanRequest() {
-    return CATALOG_SCAN_REQUEST;
+    return CatalogScanRequest.newBuilder().build();
   }
 
   /**
@@ -1186,18 +1164,12 @@ public final class RequestConverter {
     return EnableCatalogJanitorRequest.newBuilder().setEnable(enable).build();
   }
 
-  /**
-   * @see {@link #buildIsCatalogJanitorEnabledRequest()}
-   */
-  private static final IsCatalogJanitorEnabledRequest IS_CATALOG_JANITOR_ENABLED_REQUEST =
-      IsCatalogJanitorEnabledRequest.newBuilder().build();
-
   /**
    * Creates a request for querying the master whether the catalog janitor is enabled
    * @return A {@link IsCatalogJanitorEnabledRequest}
    */
   public static IsCatalogJanitorEnabledRequest buildIsCatalogJanitorEnabledRequest() {
-    return IS_CATALOG_JANITOR_ENABLED_REQUEST;
+    return IsCatalogJanitorEnabledRequest.newBuilder().build();
   }
 
   /**
@@ -1295,4 +1267,4 @@ public final class RequestConverter {
     }
     return builder.build();
   }
-}
+}
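The constants deleted in these RequestConverter hunks were a 2.5-era micro-optimization: protobuf messages are immutable once built, so a parameterless request can be constructed once and shared by every caller instead of being rebuilt per call. The pattern, sketched with a stand-in message:

import com.google.protobuf.DescriptorProtos.FileDescriptorProto;

public class CachedEmptyRequest {
  // Built once; safe to share because built messages are immutable.
  private static final FileDescriptorProto EMPTY =
      FileDescriptorProto.newBuilder().build();

  public static FileDescriptorProto buildRequest() {
    return EMPTY; // same instance for every caller
  }
}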
[5 file diffs suppressed because they are too large]
@@ -10,191 +10,72 @@ public final class ClusterIdProtos {
   }
   public interface ClusterIdOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
     // required string clusterId = 1;
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
     boolean hasClusterId();
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
-    java.lang.String getClusterId();
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
-    com.google.protobuf.ByteString
-        getClusterIdBytes();
+    String getClusterId();
   }
-  /**
-   * Protobuf type {@code ClusterId}
-   *
-   * <pre>
-   **
-   * Content of the '/hbase/hbaseid', cluster id, znode.
-   * Also cluster of the ${HBASE_ROOTDIR}/hbase.id file.
-   * </pre>
-   */
   public static final class ClusterId extends
       com.google.protobuf.GeneratedMessage
       implements ClusterIdOrBuilder {
     // Use ClusterId.newBuilder() to construct.
-    private ClusterId(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private ClusterId(Builder builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private ClusterId(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+    private ClusterId(boolean noInit) {}
 
     private static final ClusterId defaultInstance;
     public static ClusterId getDefaultInstance() {
       return defaultInstance;
     }
 
     public ClusterId getDefaultInstanceForType() {
       return defaultInstance;
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private ClusterId(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              bitField0_ |= 0x00000001;
-              clusterId_ = input.readBytes();
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
+      return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable;
     }
 
-    public static com.google.protobuf.Parser<ClusterId> PARSER =
-        new com.google.protobuf.AbstractParser<ClusterId>() {
-      public ClusterId parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new ClusterId(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<ClusterId> getParserForType() {
-      return PARSER;
-    }
-
     private int bitField0_;
     // required string clusterId = 1;
     public static final int CLUSTERID_FIELD_NUMBER = 1;
     private java.lang.Object clusterId_;
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
     public boolean hasClusterId() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
-    public java.lang.String getClusterId() {
+    public String getClusterId() {
       java.lang.Object ref = clusterId_;
-      if (ref instanceof java.lang.String) {
-        return (java.lang.String) ref;
+      if (ref instanceof String) {
+        return (String) ref;
       } else {
         com.google.protobuf.ByteString bs =
             (com.google.protobuf.ByteString) ref;
-        java.lang.String s = bs.toStringUtf8();
-        if (bs.isValidUtf8()) {
+        String s = bs.toStringUtf8();
+        if (com.google.protobuf.Internal.isValidUtf8(bs)) {
          clusterId_ = s;
         }
         return s;
       }
     }
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
-    public com.google.protobuf.ByteString
-        getClusterIdBytes() {
+    private com.google.protobuf.ByteString getClusterIdBytes() {
       java.lang.Object ref = clusterId_;
-      if (ref instanceof java.lang.String) {
+      if (ref instanceof String) {
         com.google.protobuf.ByteString b =
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
+            com.google.protobuf.ByteString.copyFromUtf8((String) ref);
         clusterId_ = b;
         return b;
       } else {
         return (com.google.protobuf.ByteString) ref;
       }
     }
 
     private void initFields() {
       clusterId_ = "";
     }
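The getClusterId()/getClusterIdBytes() pair above is protobuf's lazy string handling: the field slot holds either the raw wire bytes or an already-decoded String, converting and caching on first access. (bs.isValidUtf8() is the 2.5 spelling; the restored 2.4.1 code asks com.google.protobuf.Internal.isValidUtf8(bs) instead.) A self-contained sketch of that mechanism, not the generated code itself:

import com.google.protobuf.ByteString;

public class LazyStringField {
  // Holds either a ByteString (as read off the wire) or a decoded String.
  private Object value = ByteString.copyFromUtf8("example");

  public String getAsString() {
    Object ref = value;
    if (ref instanceof String) {
      return (String) ref;
    }
    ByteString bs = (ByteString) ref;
    String s = bs.toStringUtf8();
    if (bs.isValidUtf8()) {
      value = s; // cache the decoded form only if the bytes were valid UTF-8
    }
    return s;
  }

  public ByteString getAsBytes() {
    Object ref = value;
    if (ref instanceof String) {
      ByteString b = ByteString.copyFromUtf8((String) ref);
      value = b; // cache the encoded form
      return b;
    }
    return (ByteString) ref;
  }
}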
@@ -202,7 +83,7 @@ public final class ClusterIdProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-
+    
       if (!hasClusterId()) {
         memoizedIsInitialized = 0;
         return false;
@@ -210,7 +91,7 @@ public final class ClusterIdProtos {
       memoizedIsInitialized = 1;
       return true;
     }
-
+    
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -219,12 +100,12 @@ public final class ClusterIdProtos {
       }
       getUnknownFields().writeTo(output);
     }
-
+    
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-
+    
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -234,14 +115,14 @@ public final class ClusterIdProtos {
       memoizedSerializedSize = size;
       return size;
     }
-
+    
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-
+    
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -251,7 +132,7 @@ public final class ClusterIdProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) obj;
-
+    
       boolean result = true;
       result = result && (hasClusterId() == other.hasClusterId());
       if (hasClusterId()) {
@@ -262,13 +143,9 @@ public final class ClusterIdProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasClusterId()) {
@@ -276,85 +153,89 @@ public final class ClusterIdProtos {
       hash = (53 * hash) + getClusterId().hashCode();
     }
     hash = (29 * hash) + getUnknownFields().hashCode();
-    memoizedHashCode = hash;
     return hash;
   }
 
   public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
       com.google.protobuf.ByteString data)
       throws com.google.protobuf.InvalidProtocolBufferException {
-    return PARSER.parseFrom(data);
+    return newBuilder().mergeFrom(data).buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
       com.google.protobuf.ByteString data,
       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
       throws com.google.protobuf.InvalidProtocolBufferException {
-    return PARSER.parseFrom(data, extensionRegistry);
+    return newBuilder().mergeFrom(data, extensionRegistry)
+             .buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(byte[] data)
       throws com.google.protobuf.InvalidProtocolBufferException {
-    return PARSER.parseFrom(data);
+    return newBuilder().mergeFrom(data).buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
       byte[] data,
       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
       throws com.google.protobuf.InvalidProtocolBufferException {
-    return PARSER.parseFrom(data, extensionRegistry);
+    return newBuilder().mergeFrom(data, extensionRegistry)
+             .buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(java.io.InputStream input)
       throws java.io.IOException {
-    return PARSER.parseFrom(input);
+    return newBuilder().mergeFrom(input).buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
       java.io.InputStream input,
       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
       throws java.io.IOException {
-    return PARSER.parseFrom(input, extensionRegistry);
+    return newBuilder().mergeFrom(input, extensionRegistry)
+             .buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(java.io.InputStream input)
       throws java.io.IOException {
-    return PARSER.parseDelimitedFrom(input);
+    Builder builder = newBuilder();
+    if (builder.mergeDelimitedFrom(input)) {
+      return builder.buildParsed();
+    } else {
+      return null;
+    }
   }
   public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(
       java.io.InputStream input,
       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
       throws java.io.IOException {
-    return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    Builder builder = newBuilder();
+    if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+      return builder.buildParsed();
+    } else {
+      return null;
+    }
   }
   public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
       com.google.protobuf.CodedInputStream input)
       throws java.io.IOException {
-    return PARSER.parseFrom(input);
+    return newBuilder().mergeFrom(input).buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
       com.google.protobuf.CodedInputStream input,
       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
       throws java.io.IOException {
-    return PARSER.parseFrom(input, extensionRegistry);
+    return newBuilder().mergeFrom(input, extensionRegistry)
+             .buildParsed();
   }
 
   public static Builder newBuilder() { return Builder.create(); }
   public Builder newBuilderForType() { return newBuilder(); }
   public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId prototype) {
     return newBuilder().mergeFrom(prototype);
   }
   public Builder toBuilder() { return newBuilder(this); }
 
   @java.lang.Override
   protected Builder newBuilderForType(
       com.google.protobuf.GeneratedMessage.BuilderParent parent) {
     Builder builder = new Builder(parent);
     return builder;
   }
-  /**
-   * Protobuf type {@code ClusterId}
-   *
-   * <pre>
-   **
-   * Content of the '/hbase/hbaseid', cluster id, znode.
-   * Also cluster of the ${HBASE_ROOTDIR}/hbase.id file.
-   * </pre>
-   */
   public static final class Builder extends
       com.google.protobuf.GeneratedMessage.Builder<Builder>
      implements org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder {
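Externally the delimited readers behave the same in both generations — they return null once the stream is exhausted — so this hunk only swaps the internals from PARSER.parseDelimitedFrom to a builder loop. The restored 2.4.1 mechanism relies on Builder.mergeDelimitedFrom() returning false at end of stream; a sketch with a stand-in message:

import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import java.io.IOException;
import java.io.InputStream;

public class DelimitedRead {
  // Reads one length-prefixed message; null signals a clean end of stream.
  static FileDescriptorProto readOne(InputStream in) throws IOException {
    FileDescriptorProto.Builder builder = FileDescriptorProto.newBuilder();
    if (builder.mergeDelimitedFrom(in)) {
      return builder.build();
    }
    return null; // EOF before a length prefix was read
  }
}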
@@ -362,21 +243,18 @@ public final class ClusterIdProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
+        return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable;
       }
 
       // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
 
-      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      private Builder(BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -387,27 +265,27 @@ public final class ClusterIdProtos {
       private static Builder create() {
         return new Builder();
       }
 
       public Builder clear() {
         super.clear();
         clusterId_ = "";
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
 
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDescriptor();
       }
 
       public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance();
       }
 
       public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId build() {
         org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial();
         if (!result.isInitialized()) {
@@ -415,7 +293,17 @@ public final class ClusterIdProtos {
         }
         return result;
       }
 
+      private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
       public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = new org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId(this);
         int from_bitField0_ = bitField0_;
@@ -428,7 +316,7 @@ public final class ClusterIdProtos {
         onBuilt();
         return result;
       }
-
+    
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId)other);
@@ -437,18 +325,16 @@ public final class ClusterIdProtos {
           return this;
         }
       }
 
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) return this;
         if (other.hasClusterId()) {
-          bitField0_ |= 0x00000001;
-          clusterId_ = other.clusterId_;
-          onChanged();
+          setClusterId(other.getClusterId());
         }
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
 
       public final boolean isInitialized() {
         if (!hasClusterId()) {
@@ -456,85 +342,57 @@ public final class ClusterIdProtos {
       }
       return true;
     }
 
     public Builder mergeFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parsedMessage = null;
-      try {
-        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) e.getUnfinishedMessage();
-        throw e;
-      } finally {
-        if (parsedMessage != null) {
-          mergeFrom(parsedMessage);
-        }
-      }
-      return this;
-    }
+      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+        com.google.protobuf.UnknownFieldSet.newBuilder(
+          this.getUnknownFields());
+      while (true) {
+        int tag = input.readTag();
+        switch (tag) {
+          case 0:
+            this.setUnknownFields(unknownFields.build());
+            onChanged();
+            return this;
+          default: {
+            if (!parseUnknownField(input, unknownFields,
+                                   extensionRegistry, tag)) {
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            }
+            break;
+          }
+          case 10: {
+            bitField0_ |= 0x00000001;
+            clusterId_ = input.readBytes();
+            break;
+          }
+        }
+      }
+    }
+
     private int bitField0_;
 
     // required string clusterId = 1;
     private java.lang.Object clusterId_ = "";
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
     public boolean hasClusterId() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
-    public java.lang.String getClusterId() {
+    public String getClusterId() {
       java.lang.Object ref = clusterId_;
-      if (!(ref instanceof java.lang.String)) {
-        java.lang.String s = ((com.google.protobuf.ByteString) ref)
-            .toStringUtf8();
+      if (!(ref instanceof String)) {
+        String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
         clusterId_ = s;
         return s;
       } else {
-        return (java.lang.String) ref;
+        return (String) ref;
       }
     }
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
-    public com.google.protobuf.ByteString
-        getClusterIdBytes() {
-      java.lang.Object ref = clusterId_;
-      if (ref instanceof String) {
-        com.google.protobuf.ByteString b =
-            com.google.protobuf.ByteString.copyFromUtf8(
-                (java.lang.String) ref);
-        clusterId_ = b;
-        return b;
-      } else {
-        return (com.google.protobuf.ByteString) ref;
-      }
-    }
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
-    public Builder setClusterId(
-        java.lang.String value) {
+    public Builder setClusterId(String value) {
       if (value == null) {
         throw new NullPointerException();
       }
@@ -543,54 +401,35 @@ public final class ClusterIdProtos {
       onChanged();
       return this;
     }
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
     public Builder clearClusterId() {
       bitField0_ = (bitField0_ & ~0x00000001);
       clusterId_ = getDefaultInstance().getClusterId();
       onChanged();
       return this;
     }
-    /**
-     * <code>required string clusterId = 1;</code>
-     *
-     * <pre>
-     * This is the cluster id, a uuid as a String
-     * </pre>
-     */
-    public Builder setClusterIdBytes(
-        com.google.protobuf.ByteString value) {
-      if (value == null) {
-        throw new NullPointerException();
-      }
-      bitField0_ |= 0x00000001;
+    void setClusterId(com.google.protobuf.ByteString value) {
+      bitField0_ |= 0x00000001;
       clusterId_ = value;
       onChanged();
-      return this;
     }
 
     // @@protoc_insertion_point(builder_scope:ClusterId)
   }
 
   static {
     defaultInstance = new ClusterId(true);
     defaultInstance.initFields();
   }
 
   // @@protoc_insertion_point(class_scope:ClusterId)
 }
 
 private static com.google.protobuf.Descriptors.Descriptor
   internal_static_ClusterId_descriptor;
 private static
   com.google.protobuf.GeneratedMessage.FieldAccessorTable
     internal_static_ClusterId_fieldAccessorTable;
 
 public static com.google.protobuf.Descriptors.FileDescriptor
     getDescriptor() {
   return descriptor;
@@ -613,7 +452,9 @@ public final class ClusterIdProtos {
           internal_static_ClusterId_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_ClusterId_descriptor,
-              new java.lang.String[] { "ClusterId", });
+              new java.lang.String[] { "ClusterId", },
+              org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class,
+              org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
           return null;
         }
       };
@@ -622,6 +463,6 @@ public final class ClusterIdProtos {
         new com.google.protobuf.Descriptors.FileDescriptor[] {
         }, assigner);
   }
-
+  
   // @@protoc_insertion_point(outer_class_scope)
 }
[7 file diffs suppressed because they are too large]
@@ -10,129 +10,50 @@ public final class LoadBalancerProtos {
   }
   public interface LoadBalancerStateOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
     // optional bool balancerOn = 1;
-    /**
-     * <code>optional bool balancerOn = 1;</code>
-     */
     boolean hasBalancerOn();
-    /**
-     * <code>optional bool balancerOn = 1;</code>
-     */
     boolean getBalancerOn();
   }
-  /**
-   * Protobuf type {@code LoadBalancerState}
-   */
   public static final class LoadBalancerState extends
       com.google.protobuf.GeneratedMessage
       implements LoadBalancerStateOrBuilder {
     // Use LoadBalancerState.newBuilder() to construct.
-    private LoadBalancerState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private LoadBalancerState(Builder builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private LoadBalancerState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+    private LoadBalancerState(boolean noInit) {}
 
     private static final LoadBalancerState defaultInstance;
     public static LoadBalancerState getDefaultInstance() {
       return defaultInstance;
     }
 
     public LoadBalancerState getDefaultInstanceForType() {
       return defaultInstance;
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private LoadBalancerState(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 8: {
-              bitField0_ |= 0x00000001;
-              balancerOn_ = input.readBool();
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
+      return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable;
     }
 
-    public static com.google.protobuf.Parser<LoadBalancerState> PARSER =
-        new com.google.protobuf.AbstractParser<LoadBalancerState>() {
-      public LoadBalancerState parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new LoadBalancerState(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<LoadBalancerState> getParserForType() {
-      return PARSER;
-    }
-
     private int bitField0_;
     // optional bool balancerOn = 1;
     public static final int BALANCERON_FIELD_NUMBER = 1;
     private boolean balancerOn_;
-    /**
-     * <code>optional bool balancerOn = 1;</code>
-     */
     public boolean hasBalancerOn() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
-    /**
-     * <code>optional bool balancerOn = 1;</code>
-     */
     public boolean getBalancerOn() {
       return balancerOn_;
     }
 
     private void initFields() {
       balancerOn_ = false;
     }
@@ -140,11 +61,11 @@ public final class LoadBalancerProtos {
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
       if (isInitialized != -1) return isInitialized == 1;
-
+    
       memoizedIsInitialized = 1;
       return true;
     }
-
+    
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
       getSerializedSize();
@@ -153,12 +74,12 @@ public final class LoadBalancerProtos {
       }
       getUnknownFields().writeTo(output);
     }
-
+    
     private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
       int size = memoizedSerializedSize;
       if (size != -1) return size;
-
+    
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
@@ -168,14 +89,14 @@ public final class LoadBalancerProtos {
       memoizedSerializedSize = size;
       return size;
     }
-
+    
     private static final long serialVersionUID = 0L;
     @java.lang.Override
     protected java.lang.Object writeReplace()
         throws java.io.ObjectStreamException {
       return super.writeReplace();
     }
-
+    
     @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
@@ -185,7 +106,7 @@ public final class LoadBalancerProtos {
         return super.equals(obj);
       }
       org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState other = (org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) obj;
-
+    
       boolean result = true;
       result = result && (hasBalancerOn() == other.hasBalancerOn());
       if (hasBalancerOn()) {
@@ -196,13 +117,9 @@ public final class LoadBalancerProtos {
           getUnknownFields().equals(other.getUnknownFields());
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
-      if (memoizedHashCode != 0) {
-        return memoizedHashCode;
-      }
       int hash = 41;
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasBalancerOn()) {
@@ -210,79 +127,89 @@ public final class LoadBalancerProtos {
       hash = (53 * hash) + hashBoolean(getBalancerOn());
     }
     hash = (29 * hash) + getUnknownFields().hashCode();
-    memoizedHashCode = hash;
     return hash;
   }
 
   public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
       com.google.protobuf.ByteString data)
       throws com.google.protobuf.InvalidProtocolBufferException {
-    return PARSER.parseFrom(data);
+    return newBuilder().mergeFrom(data).buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
       com.google.protobuf.ByteString data,
       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
       throws com.google.protobuf.InvalidProtocolBufferException {
-    return PARSER.parseFrom(data, extensionRegistry);
+    return newBuilder().mergeFrom(data, extensionRegistry)
+             .buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(byte[] data)
       throws com.google.protobuf.InvalidProtocolBufferException {
-    return PARSER.parseFrom(data);
+    return newBuilder().mergeFrom(data).buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
       byte[] data,
       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
       throws com.google.protobuf.InvalidProtocolBufferException {
-    return PARSER.parseFrom(data, extensionRegistry);
+    return newBuilder().mergeFrom(data, extensionRegistry)
+             .buildParsed();
  }
   public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(java.io.InputStream input)
       throws java.io.IOException {
-    return PARSER.parseFrom(input);
+    return newBuilder().mergeFrom(input).buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
       java.io.InputStream input,
       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
       throws java.io.IOException {
-    return PARSER.parseFrom(input, extensionRegistry);
+    return newBuilder().mergeFrom(input, extensionRegistry)
+             .buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(java.io.InputStream input)
       throws java.io.IOException {
-    return PARSER.parseDelimitedFrom(input);
+    Builder builder = newBuilder();
+    if (builder.mergeDelimitedFrom(input)) {
+      return builder.buildParsed();
+    } else {
+      return null;
+    }
   }
   public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(
       java.io.InputStream input,
       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
       throws java.io.IOException {
-    return PARSER.parseDelimitedFrom(input, extensionRegistry);
+    Builder builder = newBuilder();
+    if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+      return builder.buildParsed();
+    } else {
+      return null;
+    }
   }
   public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
       com.google.protobuf.CodedInputStream input)
       throws java.io.IOException {
-    return PARSER.parseFrom(input);
+    return newBuilder().mergeFrom(input).buildParsed();
   }
   public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
       com.google.protobuf.CodedInputStream input,
       com.google.protobuf.ExtensionRegistryLite extensionRegistry)
       throws java.io.IOException {
-    return PARSER.parseFrom(input, extensionRegistry);
+    return newBuilder().mergeFrom(input, extensionRegistry)
+             .buildParsed();
   }
 
   public static Builder newBuilder() { return Builder.create(); }
   public Builder newBuilderForType() { return newBuilder(); }
   public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState prototype) {
     return newBuilder().mergeFrom(prototype);
   }
   public Builder toBuilder() { return newBuilder(this); }
 
   @java.lang.Override
   protected Builder newBuilderForType(
       com.google.protobuf.GeneratedMessage.BuilderParent parent) {
     Builder builder = new Builder(parent);
     return builder;
   }
-  /**
-   * Protobuf type {@code LoadBalancerState}
-   */
   public static final class Builder extends
       com.google.protobuf.GeneratedMessage.Builder<Builder>
      implements org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerStateOrBuilder {
@@ -290,21 +217,18 @@ public final class LoadBalancerProtos {
           getDescriptor() {
         return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
       }
 
       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
           internalGetFieldAccessorTable() {
-        return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
-            .ensureFieldAccessorsInitialized(
-                org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
+        return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable;
       }
 
       // Construct using org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.newBuilder()
       private Builder() {
         maybeForceBuilderInitialization();
       }
 
-      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+      private Builder(BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
@@ -315,27 +239,27 @@ public final class LoadBalancerProtos {
       private static Builder create() {
         return new Builder();
       }
 
       public Builder clear() {
         super.clear();
         balancerOn_ = false;
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
 
       public Builder clone() {
         return create().mergeFrom(buildPartial());
       }
 
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
-        return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
+        return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDescriptor();
       }
 
       public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState getDefaultInstanceForType() {
         return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDefaultInstance();
       }
 
       public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState build() {
         org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial();
         if (!result.isInitialized()) {
@@ -343,7 +267,17 @@ public final class LoadBalancerProtos {
         }
         return result;
       }
 
+      private org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildParsed()
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial();
+        if (!result.isInitialized()) {
+          throw newUninitializedMessageException(
+            result).asInvalidProtocolBufferException();
+        }
+        return result;
+      }
+
       public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildPartial() {
         org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = new org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState(this);
         int from_bitField0_ = bitField0_;
@@ -356,7 +290,7 @@ public final class LoadBalancerProtos {
         onBuilt();
         return result;
       }
-
+    
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) {
           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState)other);
@@ -365,7 +299,7 @@ public final class LoadBalancerProtos {
           return this;
         }
       }
-
+    
       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState other) {
         if (other == org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDefaultInstance()) return this;
         if (other.hasBalancerOn()) {
@@ -374,80 +308,83 @@ public final class LoadBalancerProtos {
         this.mergeUnknownFields(other.getUnknownFields());
         return this;
       }
 
       public final boolean isInitialized() {
         return true;
       }
 
       public Builder mergeFrom(
           com.google.protobuf.CodedInputStream input,
           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
           throws java.io.IOException {
-        org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parsedMessage = null;
-        try {
-          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
-        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) e.getUnfinishedMessage();
-          throw e;
-        } finally {
-          if (parsedMessage != null) {
-            mergeFrom(parsedMessage);
-          }
-        }
-        return this;
-      }
+        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+          com.google.protobuf.UnknownFieldSet.newBuilder(
+            this.getUnknownFields());
+        while (true) {
+          int tag = input.readTag();
+          switch (tag) {
+            case 0:
+              this.setUnknownFields(unknownFields.build());
+              onChanged();
+              return this;
+            default: {
+              if (!parseUnknownField(input, unknownFields,
+                                     extensionRegistry, tag)) {
+                this.setUnknownFields(unknownFields.build());
+                onChanged();
+                return this;
+              }
+              break;
+            }
+            case 8: {
+              bitField0_ |= 0x00000001;
+              balancerOn_ = input.readBool();
+              break;
+            }
+          }
+        }
+      }
+
       private int bitField0_;
 
       // optional bool balancerOn = 1;
       private boolean balancerOn_ ;
-      /**
-       * <code>optional bool balancerOn = 1;</code>
-       */
       public boolean hasBalancerOn() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
-      /**
-       * <code>optional bool balancerOn = 1;</code>
-       */
       public boolean getBalancerOn() {
         return balancerOn_;
       }
-      /**
-       * <code>optional bool balancerOn = 1;</code>
-       */
       public Builder setBalancerOn(boolean value) {
         bitField0_ |= 0x00000001;
         balancerOn_ = value;
         onChanged();
         return this;
       }
-      /**
-       * <code>optional bool balancerOn = 1;</code>
-       */
       public Builder clearBalancerOn() {
         bitField0_ = (bitField0_ & ~0x00000001);
         balancerOn_ = false;
         onChanged();
         return this;
       }
 
       // @@protoc_insertion_point(builder_scope:LoadBalancerState)
     }
 
     static {
       defaultInstance = new LoadBalancerState(true);
       defaultInstance.initFields();
     }
 
     // @@protoc_insertion_point(class_scope:LoadBalancerState)
   }
 
   private static com.google.protobuf.Descriptors.Descriptor
     internal_static_LoadBalancerState_descriptor;
   private static
     com.google.protobuf.GeneratedMessage.FieldAccessorTable
       internal_static_LoadBalancerState_fieldAccessorTable;
 
   public static com.google.protobuf.Descriptors.FileDescriptor
       getDescriptor() {
     return descriptor;
@@ -471,7 +408,9 @@ public final class LoadBalancerProtos {
           internal_static_LoadBalancerState_fieldAccessorTable = new
             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
               internal_static_LoadBalancerState_descriptor,
-              new java.lang.String[] { "BalancerOn", });
+              new java.lang.String[] { "BalancerOn", },
+              org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class,
+              org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
           return null;
         }
       };
@@ -480,6 +419,6 @@ public final class LoadBalancerProtos {
         new com.google.protobuf.Descriptors.FileDescriptor[] {
         }, assigner);
   }
-
+  
   // @@protoc_insertion_point(outer_class_scope)
 }
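LoadBalancerState above shows the standard accessor quartet generated for an optional field — has/get on the message, set/clear on the builder — with presence tracked in bitField0_. A typical round trip through those accessors (all of them confirmed by the diff itself):

import org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState;

public class BalancerStateDemo {
  public static void main(String[] args) {
    LoadBalancerState on = LoadBalancerState.newBuilder()
        .setBalancerOn(true)                 // sets the presence bit and the value
        .build();
    System.out.println(on.hasBalancerOn());  // true
    System.out.println(on.getBalancerOn());  // true

    LoadBalancerState unset = LoadBalancerState.newBuilder().build();
    System.out.println(unset.hasBalancerOn()); // false
    System.out.println(unset.getBalancerOn()); // false (the field default)
  }
}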
@@ -10,169 +10,66 @@ public final class MapReduceProtos {
   }
   public interface ScanMetricsOrBuilder
       extends com.google.protobuf.MessageOrBuilder {
 
     // repeated .NameInt64Pair metrics = 1;
-    /**
-     * <code>repeated .NameInt64Pair metrics = 1;</code>
-     */
     java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>
         getMetricsList();
-    /**
-     * <code>repeated .NameInt64Pair metrics = 1;</code>
-     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index);
-    /**
-     * <code>repeated .NameInt64Pair metrics = 1;</code>
-     */
     int getMetricsCount();
-    /**
-     * <code>repeated .NameInt64Pair metrics = 1;</code>
-     */
     java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
         getMetricsOrBuilderList();
-    /**
-     * <code>repeated .NameInt64Pair metrics = 1;</code>
-     */
     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
         int index);
   }
-  /**
-   * Protobuf type {@code ScanMetrics}
-   */
   public static final class ScanMetrics extends
       com.google.protobuf.GeneratedMessage
       implements ScanMetricsOrBuilder {
     // Use ScanMetrics.newBuilder() to construct.
-    private ScanMetrics(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private ScanMetrics(Builder builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private ScanMetrics(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+    private ScanMetrics(boolean noInit) {}
 
     private static final ScanMetrics defaultInstance;
     public static ScanMetrics getDefaultInstance() {
       return defaultInstance;
     }
 
     public ScanMetrics getDefaultInstanceForType() {
       return defaultInstance;
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
-    @java.lang.Override
-    public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
-      return this.unknownFields;
-    }
-    private ScanMetrics(
-        com.google.protobuf.CodedInputStream input,
-        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
-      int mutable_bitField0_ = 0;
-      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
-          com.google.protobuf.UnknownFieldSet.newBuilder();
-      try {
-        boolean done = false;
-        while (!done) {
-          int tag = input.readTag();
-          switch (tag) {
-            case 0:
-              done = true;
-              break;
-            default: {
-              if (!parseUnknownField(input, unknownFields,
-                                     extensionRegistry, tag)) {
-                done = true;
-              }
-              break;
-            }
-            case 10: {
-              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
-                metrics_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>();
-                mutable_bitField0_ |= 0x00000001;
-              }
-              metrics_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.PARSER, extensionRegistry));
-              break;
-            }
-          }
-        }
-      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
-        throw e.setUnfinishedMessage(this);
-      } catch (java.io.IOException e) {
-        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
-      } finally {
-        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
-          metrics_ = java.util.Collections.unmodifiableList(metrics_);
-        }
-        this.unknownFields = unknownFields.build();
-        makeExtensionsImmutable();
-      }
-    }
     public static final com.google.protobuf.Descriptors.Descriptor
         getDescriptor() {
       return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
     }
 
     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
         internalGetFieldAccessorTable() {
-      return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
-          .ensureFieldAccessorsInitialized(
-              org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
+      return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable;
     }
 
-    public static com.google.protobuf.Parser<ScanMetrics> PARSER =
-        new com.google.protobuf.AbstractParser<ScanMetrics>() {
-      public ScanMetrics parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new ScanMetrics(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<ScanMetrics> getParserForType() {
-      return PARSER;
-    }
-
     // repeated .NameInt64Pair metrics = 1;
     public static final int METRICS_FIELD_NUMBER = 1;
     private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> metrics_;
-    /**
-     * <code>repeated .NameInt64Pair metrics = 1;</code>
-     */
     public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
       return metrics_;
     }
-    /**
-     * <code>repeated .NameInt64Pair metrics = 1;</code>
-     */
     public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
         getMetricsOrBuilderList() {
       return metrics_;
     }
-    /**
-     * <code>repeated .NameInt64Pair metrics = 1;</code>
-     */
     public int getMetricsCount() {
       return metrics_.size();
     }
-    /**
-     * <code>repeated .NameInt64Pair metrics = 1;</code>
-     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
       return metrics_.get(index);
     }
-    /**
-     * <code>repeated .NameInt64Pair metrics = 1;</code>
-     */
     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
         int index) {
       return metrics_.get(index);
     }
 
     private void initFields() {
       metrics_ = java.util.Collections.emptyList();
     }
@ -180,11 +77,11 @@ public final class MapReduceProtos {
|
|||
public final boolean isInitialized() {
|
||||
byte isInitialized = memoizedIsInitialized;
|
||||
if (isInitialized != -1) return isInitialized == 1;
|
||||
|
||||
|
||||
memoizedIsInitialized = 1;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
public void writeTo(com.google.protobuf.CodedOutputStream output)
|
||||
throws java.io.IOException {
|
||||
getSerializedSize();
|
||||
|
@ -193,12 +90,12 @@ public final class MapReduceProtos {
|
|||
}
|
||||
getUnknownFields().writeTo(output);
|
||||
}
|
||||
|
||||
|
||||
private int memoizedSerializedSize = -1;
|
||||
public int getSerializedSize() {
|
||||
int size = memoizedSerializedSize;
|
||||
if (size != -1) return size;
|
||||
|
||||
|
||||
size = 0;
|
||||
for (int i = 0; i < metrics_.size(); i++) {
|
||||
size += com.google.protobuf.CodedOutputStream
|
||||
|
@ -208,14 +105,14 @@ public final class MapReduceProtos {
|
|||
memoizedSerializedSize = size;
|
||||
return size;
|
||||
}
|
||||
|
||||
|
||||
private static final long serialVersionUID = 0L;
|
||||
@java.lang.Override
|
||||
protected java.lang.Object writeReplace()
|
||||
throws java.io.ObjectStreamException {
|
||||
return super.writeReplace();
|
||||
}
|
||||
|
||||
|
||||
@java.lang.Override
|
||||
public boolean equals(final java.lang.Object obj) {
|
||||
if (obj == this) {
|
||||
|
@ -225,7 +122,7 @@ public final class MapReduceProtos {
|
|||
return super.equals(obj);
|
||||
}
|
||||
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics other = (org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) obj;
|
||||
|
||||
|
||||
boolean result = true;
|
||||
result = result && getMetricsList()
|
||||
.equals(other.getMetricsList());
|
||||
|
@ -233,13 +130,9 @@ public final class MapReduceProtos {
|
|||
getUnknownFields().equals(other.getUnknownFields());
|
||||
return result;
|
||||
}
|
||||
|
||||
private int memoizedHashCode = 0;
|
||||
|
||||
@java.lang.Override
|
||||
public int hashCode() {
|
||||
if (memoizedHashCode != 0) {
|
||||
return memoizedHashCode;
|
||||
}
|
||||
int hash = 41;
|
||||
hash = (19 * hash) + getDescriptorForType().hashCode();
|
||||
if (getMetricsCount() > 0) {
|
||||
|
@ -247,79 +140,89 @@ public final class MapReduceProtos {
|
|||
hash = (53 * hash) + getMetricsList().hashCode();
|
||||
}
|
||||
hash = (29 * hash) + getUnknownFields().hashCode();
|
||||
memoizedHashCode = hash;
|
||||
return hash;
|
||||
}
|
||||
|
||||
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data);
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data, extensionRegistry);
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data);
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data, extensionRegistry);
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input);
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input, extensionRegistry);
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseDelimitedFrom(input);
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
return builder.buildParsed();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseDelimitedFrom(input, extensionRegistry);
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
|
||||
return builder.buildParsed();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input);
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input, extensionRegistry);
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
|
||||
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
|
||||
@java.lang.Override
|
||||
protected Builder newBuilderForType(
|
||||
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
||||
Builder builder = new Builder(parent);
|
||||
return builder;
|
||||
}
|
||||
/**
|
||||
* Protobuf type {@code ScanMetrics}
|
||||
*/
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder>
|
||||
implements org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder {
|
||||
|
@ -327,21 +230,18 @@ public final class MapReduceProtos {
|
|||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
|
||||
}
|
||||
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
|
||||
.ensureFieldAccessorsInitialized(
|
||||
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable;
|
||||
}
|
||||
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.newBuilder()
|
||||
private Builder() {
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
|
||||
private Builder(
|
||||
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
||||
|
||||
private Builder(BuilderParent parent) {
|
||||
super(parent);
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
|
@ -353,7 +253,7 @@ public final class MapReduceProtos {
|
|||
private static Builder create() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
|
||||
public Builder clear() {
|
||||
super.clear();
|
||||
if (metricsBuilder_ == null) {
|
||||
|
@ -364,20 +264,20 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public Builder clone() {
|
||||
return create().mergeFrom(buildPartial());
|
||||
}
|
||||
|
||||
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDescriptor();
|
||||
}
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
|
||||
}
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics build() {
|
||||
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = buildPartial();
|
||||
if (!result.isInitialized()) {
|
||||
|
@ -385,7 +285,17 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = buildPartial();
|
||||
if (!result.isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
result).asInvalidProtocolBufferException();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics buildPartial() {
|
||||
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = new org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics(this);
|
||||
int from_bitField0_ = bitField0_;
|
||||
|
@ -401,7 +311,7 @@ public final class MapReduceProtos {
|
|||
onBuilt();
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics)other);
|
||||
|
@ -410,7 +320,7 @@ public final class MapReduceProtos {
|
|||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics other) {
|
||||
if (other == org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance()) return this;
|
||||
if (metricsBuilder_ == null) {
|
||||
|
@ -442,30 +352,46 @@ public final class MapReduceProtos {
|
|||
this.mergeUnknownFields(other.getUnknownFields());
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public final boolean isInitialized() {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
public Builder mergeFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parsedMessage = null;
|
||||
try {
|
||||
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
||||
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
||||
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) e.getUnfinishedMessage();
|
||||
throw e;
|
||||
} finally {
|
||||
if (parsedMessage != null) {
|
||||
mergeFrom(parsedMessage);
|
||||
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
|
||||
com.google.protobuf.UnknownFieldSet.newBuilder(
|
||||
this.getUnknownFields());
|
||||
while (true) {
|
||||
int tag = input.readTag();
|
||||
switch (tag) {
|
||||
case 0:
|
||||
this.setUnknownFields(unknownFields.build());
|
||||
onChanged();
|
||||
return this;
|
||||
default: {
|
||||
if (!parseUnknownField(input, unknownFields,
|
||||
extensionRegistry, tag)) {
|
||||
this.setUnknownFields(unknownFields.build());
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 10: {
|
||||
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.newBuilder();
|
||||
input.readMessage(subBuilder, extensionRegistry);
|
||||
addMetrics(subBuilder.buildPartial());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
private int bitField0_;
|
||||
|
||||
|
||||
// repeated .NameInt64Pair metrics = 1;
|
||||
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> metrics_ =
|
||||
java.util.Collections.emptyList();
|
||||
|
@ -475,13 +401,10 @@ public final class MapReduceProtos {
|
|||
bitField0_ |= 0x00000001;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private com.google.protobuf.RepeatedFieldBuilder<
|
||||
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder> metricsBuilder_;
|
||||
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
|
||||
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
|
||||
if (metricsBuilder_ == null) {
|
||||
return java.util.Collections.unmodifiableList(metrics_);
|
||||
|
@ -489,9 +412,6 @@ public final class MapReduceProtos {
|
|||
return metricsBuilder_.getMessageList();
|
||||
}
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public int getMetricsCount() {
|
||||
if (metricsBuilder_ == null) {
|
||||
return metrics_.size();
|
||||
|
@ -499,9 +419,6 @@ public final class MapReduceProtos {
|
|||
return metricsBuilder_.getCount();
|
||||
}
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
|
||||
if (metricsBuilder_ == null) {
|
||||
return metrics_.get(index);
|
||||
|
@ -509,9 +426,6 @@ public final class MapReduceProtos {
|
|||
return metricsBuilder_.getMessage(index);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public Builder setMetrics(
|
||||
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
|
||||
if (metricsBuilder_ == null) {
|
||||
|
@ -526,9 +440,6 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public Builder setMetrics(
|
||||
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
|
||||
if (metricsBuilder_ == null) {
|
||||
|
@ -540,9 +451,6 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public Builder addMetrics(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
|
||||
if (metricsBuilder_ == null) {
|
||||
if (value == null) {
|
||||
|
@ -556,9 +464,6 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public Builder addMetrics(
|
||||
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
|
||||
if (metricsBuilder_ == null) {
|
||||
|
@ -573,9 +478,6 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public Builder addMetrics(
|
||||
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
|
||||
if (metricsBuilder_ == null) {
|
||||
|
@ -587,9 +489,6 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public Builder addMetrics(
|
||||
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
|
||||
if (metricsBuilder_ == null) {
|
||||
|
@ -601,9 +500,6 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public Builder addAllMetrics(
|
||||
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> values) {
|
||||
if (metricsBuilder_ == null) {
|
||||
|
@ -615,9 +511,6 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public Builder clearMetrics() {
|
||||
if (metricsBuilder_ == null) {
|
||||
metrics_ = java.util.Collections.emptyList();
|
||||
|
@ -628,9 +521,6 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public Builder removeMetrics(int index) {
|
||||
if (metricsBuilder_ == null) {
|
||||
ensureMetricsIsMutable();
|
||||
|
@ -641,16 +531,10 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return this;
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder getMetricsBuilder(
|
||||
int index) {
|
||||
return getMetricsFieldBuilder().getBuilder(index);
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
|
||||
int index) {
|
||||
if (metricsBuilder_ == null) {
|
||||
|
@ -658,9 +542,6 @@ public final class MapReduceProtos {
|
|||
return metricsBuilder_.getMessageOrBuilder(index);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
|
||||
getMetricsOrBuilderList() {
|
||||
if (metricsBuilder_ != null) {
|
||||
|
@ -669,24 +550,15 @@ public final class MapReduceProtos {
|
|||
return java.util.Collections.unmodifiableList(metrics_);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder() {
|
||||
return getMetricsFieldBuilder().addBuilder(
|
||||
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder(
|
||||
int index) {
|
||||
return getMetricsFieldBuilder().addBuilder(
|
||||
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
|
||||
}
|
||||
/**
|
||||
* <code>repeated .NameInt64Pair metrics = 1;</code>
|
||||
*/
|
||||
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder>
|
||||
getMetricsBuilderList() {
|
||||
return getMetricsFieldBuilder().getBuilderList();
|
||||
|
@ -705,24 +577,24 @@ public final class MapReduceProtos {
|
|||
}
|
||||
return metricsBuilder_;
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:ScanMetrics)
|
||||
}
|
||||
|
||||
|
||||
static {
|
||||
defaultInstance = new ScanMetrics(true);
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(class_scope:ScanMetrics)
|
||||
}
|
||||
|
||||
|
||||
private static com.google.protobuf.Descriptors.Descriptor
|
||||
internal_static_ScanMetrics_descriptor;
|
||||
private static
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internal_static_ScanMetrics_fieldAccessorTable;
|
||||
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
return descriptor;
|
||||
|
@ -746,7 +618,9 @@ public final class MapReduceProtos {
|
|||
internal_static_ScanMetrics_fieldAccessorTable = new
|
||||
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
|
||||
internal_static_ScanMetrics_descriptor,
|
||||
new java.lang.String[] { "Metrics", });
|
||||
new java.lang.String[] { "Metrics", },
|
||||
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class,
|
||||
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
|
||||
return null;
|
||||
}
|
||||
};
|
||||
|
@ -756,6 +630,6 @@ public final class MapReduceProtos {
|
|||
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
|
||||
}, assigner);
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(outer_class_scope)
|
||||
}
|
||||
|
|
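The change repeated throughout this file is the one the revert is about: the protobuf 2.5 static PARSER path is deleted and the 2.4.1 builder-based parse path (with the private buildParsed() helper) is restored. A minimal sketch of the restored idiom, using only com.google.protobuf 2.x APIs; the class and method names below are illustrative, not part of this diff:

    import com.google.protobuf.InvalidProtocolBufferException;
    import com.google.protobuf.Message;

    // 2.5 style (deleted):  Foo msg = Foo.PARSER.parseFrom(bytes);
    // 2.4.1 style (restored): Foo msg = Foo.newBuilder().mergeFrom(bytes).build();
    final class BuilderParseSketch {
      // Mirrors the generated buildParsed(): merge the bytes into a fresh
      // builder, then surface missing required fields as
      // InvalidProtocolBufferException rather than an unchecked error.
      static Message parse(Message prototype, byte[] data)
          throws InvalidProtocolBufferException {
        try {
          return prototype.newBuilderForType().mergeFrom(data).build();
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException();
        }
      }
    }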
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
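One more behavioral detail of the restored code, visible in the parseDelimitedFrom bodies above and again in the next file: mergeDelimitedFrom returns false on a clean end of stream, which the generated wrapper turns into a null return. A sketch of that contract (the helper name and prototype parameter are illustrative assumptions, not from this diff):

    import com.google.protobuf.Message;
    import java.io.IOException;
    import java.io.InputStream;

    final class DelimitedReadSketch {
      // Reads one length-prefixed message from the stream; null signals end
      // of stream, matching the generated parseDelimitedFrom contract.
      static Message readOne(Message prototype, InputStream in) throws IOException {
        Message.Builder builder = prototype.newBuilderForType();
        if (!builder.mergeDelimitedFrom(in)) {
          return null;
        }
        return builder.build();
      }
    }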
|
@ -11,130 +11,69 @@ public final class MultiRowMutationProcessorProtos {
|
|||
public interface MultiRowMutationProcessorRequestOrBuilder
|
||||
extends com.google.protobuf.MessageOrBuilder {
|
||||
}
|
||||
/**
|
||||
* Protobuf type {@code MultiRowMutationProcessorRequest}
|
||||
*/
|
||||
public static final class MultiRowMutationProcessorRequest extends
|
||||
com.google.protobuf.GeneratedMessage
|
||||
implements MultiRowMutationProcessorRequestOrBuilder {
|
||||
// Use MultiRowMutationProcessorRequest.newBuilder() to construct.
|
||||
private MultiRowMutationProcessorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
|
||||
private MultiRowMutationProcessorRequest(Builder builder) {
|
||||
super(builder);
|
||||
this.unknownFields = builder.getUnknownFields();
|
||||
}
|
||||
private MultiRowMutationProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
|
||||
|
||||
private MultiRowMutationProcessorRequest(boolean noInit) {}
|
||||
|
||||
private static final MultiRowMutationProcessorRequest defaultInstance;
|
||||
public static MultiRowMutationProcessorRequest getDefaultInstance() {
|
||||
return defaultInstance;
|
||||
}
|
||||
|
||||
|
||||
public MultiRowMutationProcessorRequest getDefaultInstanceForType() {
|
||||
return defaultInstance;
|
||||
}
|
||||
|
||||
private final com.google.protobuf.UnknownFieldSet unknownFields;
|
||||
@java.lang.Override
|
||||
public final com.google.protobuf.UnknownFieldSet
|
||||
getUnknownFields() {
|
||||
return this.unknownFields;
|
||||
}
|
||||
private MultiRowMutationProcessorRequest(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
initFields();
|
||||
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
|
||||
com.google.protobuf.UnknownFieldSet.newBuilder();
|
||||
try {
|
||||
boolean done = false;
|
||||
while (!done) {
|
||||
int tag = input.readTag();
|
||||
switch (tag) {
|
||||
case 0:
|
||||
done = true;
|
||||
break;
|
||||
default: {
|
||||
if (!parseUnknownField(input, unknownFields,
|
||||
extensionRegistry, tag)) {
|
||||
done = true;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
||||
throw e.setUnfinishedMessage(this);
|
||||
} catch (java.io.IOException e) {
|
||||
throw new com.google.protobuf.InvalidProtocolBufferException(
|
||||
e.getMessage()).setUnfinishedMessage(this);
|
||||
} finally {
|
||||
this.unknownFields = unknownFields.build();
|
||||
makeExtensionsImmutable();
|
||||
}
|
||||
}
|
||||
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
|
||||
}
|
||||
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable
|
||||
.ensureFieldAccessorsInitialized(
|
||||
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable;
|
||||
}
|
||||
|
||||
public static com.google.protobuf.Parser<MultiRowMutationProcessorRequest> PARSER =
|
||||
new com.google.protobuf.AbstractParser<MultiRowMutationProcessorRequest>() {
|
||||
public MultiRowMutationProcessorRequest parsePartialFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return new MultiRowMutationProcessorRequest(input, extensionRegistry);
|
||||
}
|
||||
};
|
||||
|
||||
@java.lang.Override
|
||||
public com.google.protobuf.Parser<MultiRowMutationProcessorRequest> getParserForType() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
|
||||
private void initFields() {
|
||||
}
|
||||
private byte memoizedIsInitialized = -1;
|
||||
public final boolean isInitialized() {
|
||||
byte isInitialized = memoizedIsInitialized;
|
||||
if (isInitialized != -1) return isInitialized == 1;
|
||||
|
||||
|
||||
memoizedIsInitialized = 1;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
public void writeTo(com.google.protobuf.CodedOutputStream output)
|
||||
throws java.io.IOException {
|
||||
getSerializedSize();
|
||||
getUnknownFields().writeTo(output);
|
||||
}
|
||||
|
||||
|
||||
private int memoizedSerializedSize = -1;
|
||||
public int getSerializedSize() {
|
||||
int size = memoizedSerializedSize;
|
||||
if (size != -1) return size;
|
||||
|
||||
|
||||
size = 0;
|
||||
size += getUnknownFields().getSerializedSize();
|
||||
memoizedSerializedSize = size;
|
||||
return size;
|
||||
}
|
||||
|
||||
|
||||
private static final long serialVersionUID = 0L;
|
||||
@java.lang.Override
|
||||
protected java.lang.Object writeReplace()
|
||||
throws java.io.ObjectStreamException {
|
||||
return super.writeReplace();
|
||||
}
|
||||
|
||||
|
||||
@java.lang.Override
|
||||
public boolean equals(final java.lang.Object obj) {
|
||||
if (obj == this) {
|
||||
|
@ -144,95 +83,101 @@ public final class MultiRowMutationProcessorProtos {
|
|||
return super.equals(obj);
|
||||
}
|
||||
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) obj;
|
||||
|
||||
|
||||
boolean result = true;
|
||||
result = result &&
|
||||
getUnknownFields().equals(other.getUnknownFields());
|
||||
return result;
|
||||
}
|
||||
|
||||
private int memoizedHashCode = 0;
|
||||
|
||||
@java.lang.Override
|
||||
public int hashCode() {
|
||||
if (memoizedHashCode != 0) {
|
||||
return memoizedHashCode;
|
||||
}
|
||||
int hash = 41;
|
||||
hash = (19 * hash) + getDescriptorForType().hashCode();
|
||||
hash = (29 * hash) + getUnknownFields().hashCode();
|
||||
memoizedHashCode = hash;
|
||||
return hash;
|
||||
}
|
||||
|
||||
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data);
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data, extensionRegistry);
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data);
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data, extensionRegistry);
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input);
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input, extensionRegistry);
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseDelimitedFrom(input);
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
return builder.buildParsed();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseDelimitedFrom(input, extensionRegistry);
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
|
||||
return builder.buildParsed();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input);
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input, extensionRegistry);
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
|
||||
|
||||
public static Builder newBuilder() { return Builder.create(); }
|
||||
public Builder newBuilderForType() { return newBuilder(); }
|
||||
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest prototype) {
|
||||
return newBuilder().mergeFrom(prototype);
|
||||
}
|
||||
public Builder toBuilder() { return newBuilder(this); }
|
||||
|
||||
|
||||
@java.lang.Override
|
||||
protected Builder newBuilderForType(
|
||||
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
||||
Builder builder = new Builder(parent);
|
||||
return builder;
|
||||
}
|
||||
/**
|
||||
* Protobuf type {@code MultiRowMutationProcessorRequest}
|
||||
*/
|
||||
public static final class Builder extends
|
||||
com.google.protobuf.GeneratedMessage.Builder<Builder>
|
||||
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequestOrBuilder {
|
||||
|
@ -240,21 +185,18 @@ public final class MultiRowMutationProcessorProtos {
|
|||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
|
||||
}
|
||||
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable
|
||||
.ensureFieldAccessorsInitialized(
|
||||
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable;
|
||||
}
|
||||
|
||||
|
||||
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.newBuilder()
|
||||
private Builder() {
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
|
||||
private Builder(
|
||||
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
|
||||
|
||||
private Builder(BuilderParent parent) {
|
||||
super(parent);
|
||||
maybeForceBuilderInitialization();
|
||||
}
|
||||
|
@ -265,25 +207,25 @@ public final class MultiRowMutationProcessorProtos {
|
|||
private static Builder create() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
|
||||
public Builder clear() {
|
||||
super.clear();
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public Builder clone() {
|
||||
return create().mergeFrom(buildPartial());
|
||||
}
|
||||
|
||||
|
||||
public com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptorForType() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDescriptor();
|
||||
}
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest getDefaultInstanceForType() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDefaultInstance();
|
||||
}
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest build() {
|
||||
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = buildPartial();
|
||||
if (!result.isInitialized()) {
|
||||
|
@ -291,13 +233,23 @@ public final class MultiRowMutationProcessorProtos {
|
|||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildParsed()
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = buildPartial();
|
||||
if (!result.isInitialized()) {
|
||||
throw newUninitializedMessageException(
|
||||
result).asInvalidProtocolBufferException();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildPartial() {
|
||||
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest(this);
|
||||
onBuilt();
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
public Builder mergeFrom(com.google.protobuf.Message other) {
|
||||
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) {
|
||||
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest)other);
|
||||
|
@ -306,173 +258,122 @@ public final class MultiRowMutationProcessorProtos {
|
|||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest other) {
|
||||
if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDefaultInstance()) return this;
|
||||
this.mergeUnknownFields(other.getUnknownFields());
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public final boolean isInitialized() {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
public Builder mergeFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parsedMessage = null;
|
||||
try {
|
||||
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
|
||||
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
||||
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) e.getUnfinishedMessage();
|
||||
throw e;
|
||||
} finally {
|
||||
if (parsedMessage != null) {
|
||||
mergeFrom(parsedMessage);
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorRequest)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new MultiRowMutationProcessorRequest(true);
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:MultiRowMutationProcessorRequest)
|
||||
}
|
||||
|
||||
public interface MultiRowMutationProcessorResponseOrBuilder
|
||||
extends com.google.protobuf.MessageOrBuilder {
|
||||
}
|
||||
/**
|
||||
* Protobuf type {@code MultiRowMutationProcessorResponse}
|
||||
*/
|
||||
public static final class MultiRowMutationProcessorResponse extends
|
||||
com.google.protobuf.GeneratedMessage
|
||||
implements MultiRowMutationProcessorResponseOrBuilder {
|
||||
// Use MultiRowMutationProcessorResponse.newBuilder() to construct.
|
||||
private MultiRowMutationProcessorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
|
||||
super(builder);
|
||||
this.unknownFields = builder.getUnknownFields();
|
||||
}
|
||||
private MultiRowMutationProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
|
||||
|
||||
private static final MultiRowMutationProcessorResponse defaultInstance;
|
||||
public static MultiRowMutationProcessorResponse getDefaultInstance() {
|
||||
return defaultInstance;
|
||||
}
|
||||
|
||||
public MultiRowMutationProcessorResponse getDefaultInstanceForType() {
|
||||
return defaultInstance;
|
||||
}
|
||||
|
||||
private final com.google.protobuf.UnknownFieldSet unknownFields;
|
||||
@java.lang.Override
|
||||
public final com.google.protobuf.UnknownFieldSet
|
||||
getUnknownFields() {
|
||||
return this.unknownFields;
|
||||
}
|
||||
private MultiRowMutationProcessorResponse(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
initFields();
|
||||
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
|
||||
com.google.protobuf.UnknownFieldSet.newBuilder();
|
||||
try {
|
||||
boolean done = false;
|
||||
while (!done) {
|
||||
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
|
||||
com.google.protobuf.UnknownFieldSet.newBuilder(
|
||||
this.getUnknownFields());
|
||||
while (true) {
|
||||
int tag = input.readTag();
|
||||
switch (tag) {
|
||||
case 0:
|
||||
done = true;
|
||||
break;
|
||||
this.setUnknownFields(unknownFields.build());
|
||||
onChanged();
|
||||
return this;
|
||||
default: {
|
||||
if (!parseUnknownField(input, unknownFields,
|
||||
extensionRegistry, tag)) {
|
||||
done = true;
|
||||
this.setUnknownFields(unknownFields.build());
|
||||
onChanged();
|
||||
return this;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
|
||||
throw e.setUnfinishedMessage(this);
|
||||
} catch (java.io.IOException e) {
|
||||
throw new com.google.protobuf.InvalidProtocolBufferException(
|
||||
e.getMessage()).setUnfinishedMessage(this);
|
||||
} finally {
|
||||
this.unknownFields = unknownFields.build();
|
||||
makeExtensionsImmutable();
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorRequest)
|
||||
}
|
||||
|
||||
static {
|
||||
defaultInstance = new MultiRowMutationProcessorRequest(true);
|
||||
defaultInstance.initFields();
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:MultiRowMutationProcessorRequest)
|
||||
}
|
||||
|
||||
public interface MultiRowMutationProcessorResponseOrBuilder
|
||||
extends com.google.protobuf.MessageOrBuilder {
|
||||
}
|
||||
public static final class MultiRowMutationProcessorResponse extends
|
||||
com.google.protobuf.GeneratedMessage
|
||||
implements MultiRowMutationProcessorResponseOrBuilder {
|
||||
// Use MultiRowMutationProcessorResponse.newBuilder() to construct.
|
||||
private MultiRowMutationProcessorResponse(Builder builder) {
|
||||
super(builder);
|
||||
}
|
||||
private MultiRowMutationProcessorResponse(boolean noInit) {}
|
||||
|
||||
private static final MultiRowMutationProcessorResponse defaultInstance;
|
||||
public static MultiRowMutationProcessorResponse getDefaultInstance() {
|
||||
return defaultInstance;
|
||||
}
|
||||
|
||||
public MultiRowMutationProcessorResponse getDefaultInstanceForType() {
|
||||
return defaultInstance;
|
||||
}
|
||||
|
||||
public static final com.google.protobuf.Descriptors.Descriptor
|
||||
getDescriptor() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
|
||||
}
|
||||
|
||||
|
||||
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
|
||||
internalGetFieldAccessorTable() {
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable
|
||||
.ensureFieldAccessorsInitialized(
|
||||
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
|
||||
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;
|
||||
}
|
||||
|
||||
public static com.google.protobuf.Parser<MultiRowMutationProcessorResponse> PARSER =
|
||||
new com.google.protobuf.AbstractParser<MultiRowMutationProcessorResponse>() {
|
||||
public MultiRowMutationProcessorResponse parsePartialFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return new MultiRowMutationProcessorResponse(input, extensionRegistry);
|
||||
}
|
||||
};
|
||||
|
||||
@java.lang.Override
|
||||
public com.google.protobuf.Parser<MultiRowMutationProcessorResponse> getParserForType() {
|
||||
return PARSER;
|
||||
}
|
||||
|
||||
|
||||
private void initFields() {
|
||||
}
|
||||
private byte memoizedIsInitialized = -1;
|
||||
public final boolean isInitialized() {
|
||||
byte isInitialized = memoizedIsInitialized;
|
||||
if (isInitialized != -1) return isInitialized == 1;
|
||||
|
||||
|
||||
memoizedIsInitialized = 1;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
public void writeTo(com.google.protobuf.CodedOutputStream output)
|
||||
throws java.io.IOException {
|
||||
getSerializedSize();
|
||||
getUnknownFields().writeTo(output);
|
||||
}
|
||||
|
||||
|
||||
private int memoizedSerializedSize = -1;
|
||||
public int getSerializedSize() {
|
||||
int size = memoizedSerializedSize;
|
||||
if (size != -1) return size;
|
||||
|
||||
|
||||
size = 0;
|
||||
size += getUnknownFields().getSerializedSize();
|
||||
memoizedSerializedSize = size;
|
||||
return size;
|
||||
}
|
||||
|
||||
|
||||
private static final long serialVersionUID = 0L;
|
||||
@java.lang.Override
|
||||
protected java.lang.Object writeReplace()
|
||||
throws java.io.ObjectStreamException {
|
||||
return super.writeReplace();
|
||||
}
|
||||
|
||||
|
||||
@java.lang.Override
|
||||
public boolean equals(final java.lang.Object obj) {
|
||||
if (obj == this) {
|
||||
|
@ -482,95 +383,101 @@ public final class MultiRowMutationProcessorProtos {
|
|||
return super.equals(obj);
|
||||
}
|
||||
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) obj;
|
||||
|
||||
|
||||
boolean result = true;
|
||||
result = result &&
|
||||
getUnknownFields().equals(other.getUnknownFields());
|
||||
return result;
|
||||
}
|
||||
|
||||
private int memoizedHashCode = 0;
|
||||
|
||||
@java.lang.Override
|
||||
public int hashCode() {
|
||||
if (memoizedHashCode != 0) {
|
||||
return memoizedHashCode;
|
||||
}
|
||||
int hash = 41;
|
||||
hash = (19 * hash) + getDescriptorForType().hashCode();
|
||||
hash = (29 * hash) + getUnknownFields().hashCode();
|
||||
memoizedHashCode = hash;
|
||||
return hash;
|
||||
}
|
||||
|
||||
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
|
||||
com.google.protobuf.ByteString data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data);
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
|
||||
com.google.protobuf.ByteString data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data, extensionRegistry);
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(byte[] data)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data);
|
||||
return newBuilder().mergeFrom(data).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
|
||||
byte[] data,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws com.google.protobuf.InvalidProtocolBufferException {
|
||||
return PARSER.parseFrom(data, extensionRegistry);
|
||||
return newBuilder().mergeFrom(data, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input);
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input, extensionRegistry);
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom(java.io.InputStream input)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseDelimitedFrom(input);
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input)) {
|
||||
return builder.buildParsed();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom(
|
||||
java.io.InputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseDelimitedFrom(input, extensionRegistry);
|
||||
Builder builder = newBuilder();
|
||||
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
|
||||
return builder.buildParsed();
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
|
||||
com.google.protobuf.CodedInputStream input)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input);
|
||||
return newBuilder().mergeFrom(input).buildParsed();
|
||||
}
|
||||
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
|
||||
com.google.protobuf.CodedInputStream input,
|
||||
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
|
||||
throws java.io.IOException {
|
||||
return PARSER.parseFrom(input, extensionRegistry);
|
||||
return newBuilder().mergeFrom(input, extensionRegistry)
|
||||
.buildParsed();
|
||||
}
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code MultiRowMutationProcessorResponse}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponseOrBuilder {
@@ -578,21 +485,18 @@ public final class MultiRowMutationProcessorProtos {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }

@@ -603,25 +507,25 @@ public final class MultiRowMutationProcessorProtos {
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDescriptor();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = buildPartial();
        if (!result.isInitialized()) {
@@ -629,13 +533,23 @@ public final class MultiRowMutationProcessorProtos {
        }
        return result;
      }

      private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
              result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse)other);
@@ -644,46 +558,56 @@ public final class MultiRowMutationProcessorProtos {
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorResponse)
    }

    static {
      defaultInstance = new MultiRowMutationProcessorResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:MultiRowMutationProcessorResponse)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MultiRowMutationProcessorRequest_descriptor;
  private static
@@ -694,7 +618,7 @@ public final class MultiRowMutationProcessorProtos {
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
@@ -719,13 +643,17 @@ public final class MultiRowMutationProcessorProtos {
          internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MultiRowMutationProcessorRequest_descriptor,
              new java.lang.String[] { });
              new java.lang.String[] { },
              org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class,
              org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
          internal_static_MultiRowMutationProcessorResponse_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_MultiRowMutationProcessorResponse_descriptor,
              new java.lang.String[] { });
              new java.lang.String[] { },
              org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class,
              org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
          return null;
        }
      };
@@ -734,6 +662,6 @@ public final class MultiRowMutationProcessorProtos {
      new com.google.protobuf.Descriptors.FileDescriptor[] {
      }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -10,168 +10,64 @@ public final class Tracing {
  }
  public interface RPCTInfoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional int64 traceId = 1;
    /**
     * <code>optional int64 traceId = 1;</code>
     */
    boolean hasTraceId();
    /**
     * <code>optional int64 traceId = 1;</code>
     */
    long getTraceId();

    // optional int64 parentId = 2;
    /**
     * <code>optional int64 parentId = 2;</code>
     */
    boolean hasParentId();
    /**
     * <code>optional int64 parentId = 2;</code>
     */
    long getParentId();
  }
  /**
   * Protobuf type {@code RPCTInfo}
   *
   * <pre>
   *Used to pass through the information necessary to continue
   *a trace after an RPC is made. All we need is the traceid
   *(so we know the overarching trace this message is a part of), and
   *the id of the current span when this message was sent, so we know
   *what span caused the new span we will create when this message is received.
   * </pre>
   */
  public static final class RPCTInfo extends
      com.google.protobuf.GeneratedMessage
      implements RPCTInfoOrBuilder {
    // Use RPCTInfo.newBuilder() to construct.
    private RPCTInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    private RPCTInfo(Builder builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private RPCTInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private RPCTInfo(boolean noInit) {}

    private static final RPCTInfo defaultInstance;
    public static RPCTInfo getDefaultInstance() {
      return defaultInstance;
    }

    public RPCTInfo getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private RPCTInfo(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              traceId_ = input.readInt64();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              parentId_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
      return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable;
    }

    public static com.google.protobuf.Parser<RPCTInfo> PARSER =
        new com.google.protobuf.AbstractParser<RPCTInfo>() {
      public RPCTInfo parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new RPCTInfo(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<RPCTInfo> getParserForType() {
      return PARSER;
    }

    private int bitField0_;
    // optional int64 traceId = 1;
    public static final int TRACEID_FIELD_NUMBER = 1;
    private long traceId_;
    /**
     * <code>optional int64 traceId = 1;</code>
     */
    public boolean hasTraceId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional int64 traceId = 1;</code>
     */
    public long getTraceId() {
      return traceId_;
    }

    // optional int64 parentId = 2;
    public static final int PARENTID_FIELD_NUMBER = 2;
    private long parentId_;
    /**
     * <code>optional int64 parentId = 2;</code>
     */
    public boolean hasParentId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional int64 parentId = 2;</code>
     */
    public long getParentId() {
      return parentId_;
    }

    private void initFields() {
      traceId_ = 0L;
      parentId_ = 0L;
@@ -180,11 +76,11 @@ public final class Tracing {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();
@@ -196,12 +92,12 @@ public final class Tracing {
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
@@ -215,14 +111,14 @@ public final class Tracing {
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
@@ -232,7 +128,7 @@ public final class Tracing {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) obj;

      boolean result = true;
      result = result && (hasTraceId() == other.hasTraceId());
      if (hasTraceId()) {
@@ -248,13 +144,9 @@ public final class Tracing {
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTraceId()) {
@@ -266,87 +158,89 @@ public final class Tracing {
        hash = (53 * hash) + hashLong(getParentId());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
      return newBuilder().mergeFrom(data, extensionRegistry)
          .buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
      return newBuilder().mergeFrom(data, extensionRegistry)
          .buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
      return newBuilder().mergeFrom(input, extensionRegistry)
          .buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
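The delimited variants just above differ from the plain parseFrom family: each message on the stream is preceded by a varint length, and mergeDelimitedFrom returns false at a clean end-of-stream, which the restored 2.4.1 code turns into a null return. A small sketch of reading such a stream, again with a hypothetical generated message MyMessage rather than a type from this commit:

    // Write a few length-delimited messages, then read them back until EOF.
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    MyMessage.getDefaultInstance().writeDelimitedTo(out);
    MyMessage.getDefaultInstance().writeDelimitedTo(out);

    java.io.ByteArrayInputStream in =
        new java.io.ByteArrayInputStream(out.toByteArray());
    MyMessage msg;
    while ((msg = MyMessage.parseDelimitedFrom(in)) != null) {
      // process msg; the loop ends when parseDelimitedFrom returns null at EOF
    }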
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
      return newBuilder().mergeFrom(input, extensionRegistry)
          .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code RPCTInfo}
     *
     * <pre>
     *Used to pass through the information necessary to continue
     *a trace after an RPC is made. All we need is the traceid
     *(so we know the overarching trace this message is a part of), and
     *the id of the current span when this message was sent, so we know
     *what span caused the new span we will create when this message is received.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder {
@@ -354,21 +248,18 @@ public final class Tracing {
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
        return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }

@@ -379,7 +270,7 @@ public final class Tracing {
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        traceId_ = 0L;
@@ -388,20 +279,20 @@ public final class Tracing {
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
        return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDescriptor();
      }

      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo build() {
        org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial();
        if (!result.isInitialized()) {
@@ -409,7 +300,17 @@ public final class Tracing {
        }
        return result;
      }

      private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
              result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = new org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo(this);
        int from_bitField0_ = bitField0_;
@@ -426,7 +327,7 @@ public final class Tracing {
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)other);
@@ -435,7 +336,7 @@ public final class Tracing {
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) return this;
        if (other.hasTraceId()) {
@@ -447,113 +348,109 @@ public final class Tracing {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              traceId_ = input.readInt64();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              parentId_ = input.readInt64();
              break;
            }
          }
        }
        return this;
      }
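The hand-rolled loop above is how 2.4.1-era builders consume the wire format: each readTag() value packs the field number and wire type together, so case 8 is field 1 as a varint, case 16 is field 2 as a varint, and tag 0 means end of input. A standalone sketch of the same arithmetic (the class and helper are illustrative, not part of this commit; the field numbers mirror RPCTInfo):

    // Standalone illustration of the tag arithmetic driving the switch above.
    final class WireTags {
      // protobuf tag layout: (fieldNumber << 3) | wireType; wire type 0 = varint,
      // which is how int64 fields are encoded.
      static int makeTag(int fieldNumber, int wireType) {
        return (fieldNumber << 3) | wireType;
      }
      public static void main(String[] args) {
        System.out.println(makeTag(1, 0));  // 8  -> "case 8": traceId, varint
        System.out.println(makeTag(2, 0));  // 16 -> "case 16": parentId, varint
      }
    }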
      private int bitField0_;

      // optional int64 traceId = 1;
      private long traceId_ ;
      /**
       * <code>optional int64 traceId = 1;</code>
       */
      public boolean hasTraceId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional int64 traceId = 1;</code>
       */
      public long getTraceId() {
        return traceId_;
      }
      /**
       * <code>optional int64 traceId = 1;</code>
       */
      public Builder setTraceId(long value) {
        bitField0_ |= 0x00000001;
        traceId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 traceId = 1;</code>
       */
      public Builder clearTraceId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        traceId_ = 0L;
        onChanged();
        return this;
      }

      // optional int64 parentId = 2;
      private long parentId_ ;
      /**
       * <code>optional int64 parentId = 2;</code>
       */
      public boolean hasParentId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional int64 parentId = 2;</code>
       */
      public long getParentId() {
        return parentId_;
      }
      /**
       * <code>optional int64 parentId = 2;</code>
       */
      public Builder setParentId(long value) {
        bitField0_ |= 0x00000002;
        parentId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 parentId = 2;</code>
       */
      public Builder clearParentId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        parentId_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:RPCTInfo)
    }

    static {
      defaultInstance = new RPCTInfo(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:RPCTInfo)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RPCTInfo_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RPCTInfo_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
@@ -577,7 +474,9 @@ public final class Tracing {
          internal_static_RPCTInfo_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RPCTInfo_descriptor,
              new java.lang.String[] { "TraceId", "ParentId", });
              new java.lang.String[] { "TraceId", "ParentId", },
              org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class,
              org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
          return null;
        }
      };
@@ -586,6 +485,6 @@ public final class Tracing {
      new com.google.protobuf.Descriptors.FileDescriptor[] {
      }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
File diff suppressed because it is too large
@@ -348,17 +348,19 @@ service MasterAdminService {

  /**
   * Create a snapshot for the given table.
   * @param snapshot description of the snapshot to take
   */
  rpc snapshot(TakeSnapshotRequest) returns(TakeSnapshotResponse);

  /**
   * List completed snapshots.
   * Return a list of snapshot descriptors for completed snapshots
   * @return a list of snapshot descriptors for completed snapshots
   */
  rpc getCompletedSnapshots(ListSnapshotRequest) returns(ListSnapshotResponse);

  /**
   * Delete an existing snapshot. This method can also be used to clean up an aborted snapshot.
   * @param snapshotName snapshot to delete
   */
  rpc deleteSnapshot(DeleteSnapshotRequest) returns(DeleteSnapshotResponse);

@@ -369,6 +371,7 @@ service MasterAdminService {

  /**
   * Restore a snapshot
   * @param snapshot description of the snapshot to restore
   */
  rpc restoreSnapshot(RestoreSnapshotRequest) returns(RestoreSnapshotResponse);
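For orientation, protoc turns each rpc declaration above into a method on the generated service's BlockingInterface. A sketch of a call site follows; the master stub, the enclosing MasterAdminProtos class name, and the empty request are all assumptions for illustration, not code from this commit:

    // 'master' is an already-obtained MasterAdminService.BlockingInterface
    // stub; how it is acquired is outside this diff. A null RpcController is
    // assumed to be acceptable here, as it commonly is in HBase call sites.
    MasterAdminProtos.TakeSnapshotRequest request =
        MasterAdminProtos.TakeSnapshotRequest.newBuilder().build();
    MasterAdminProtos.TakeSnapshotResponse response =
        master.snapshot(null, request);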
@@ -119,7 +119,7 @@ import com.google.common.base.Function;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import com.google.protobuf.Message.Builder;
import com.google.protobuf.TextFormat;
// Uses Writables doing sasl

@@ -1673,7 +1673,8 @@ public abstract class HBaseServer implements RpcServer {
      CodedInputStream cis = CodedInputStream.newInstance(buf, offset, buf.length);
      int headerSize = cis.readRawVarint32();
      offset = cis.getTotalBytesRead();
      RequestHeader header = RequestHeader.PARSER.parseFrom(buf, offset, headerSize);
      RequestHeader header =
        RequestHeader.newBuilder().mergeFrom(buf, offset, headerSize).build();
      offset += headerSize;
      int id = header.getCallId();
      if (LOG.isDebugEnabled()) {
@@ -1700,13 +1701,15 @@ public abstract class HBaseServer implements RpcServer {
      Message m = methodCache.getMethodArgType(method);
      // Check that there is a param to deserialize.
      if (m != null) {
        Parser<? extends Message> parser = m.getParserForType();
        Builder builder = null;
        builder = m.newBuilderForType();
        // To read the varint, I need an inputstream; might as well be a CIS.
        cis = CodedInputStream.newInstance(buf, offset, buf.length);
        int paramSize = cis.readRawVarint32();
        offset += cis.getTotalBytesRead();
        if (parser != null) {
          param = parser.parseFrom(buf, offset, paramSize);
        if (builder != null) {
          builder.mergeFrom(buf, offset, paramSize);
          param = builder.build();
        }
        offset += paramSize;
      }
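The request wire format handled above is a varint length followed by that many message bytes, read twice per call: once for the header, once for the parameter. A standalone sketch of the same framing in the restored 2.4.1 style; buf, offset, and prototype are hypothetical inputs, not variables from this commit:

    // Sketch: read one varint-length-prefixed message out of buf at offset.
    // 'prototype' supplies the concrete generated message type.
    static Message readPrefixed(byte[] buf, int offset, Message prototype)
        throws java.io.IOException {
      CodedInputStream cis =
          CodedInputStream.newInstance(buf, offset, buf.length - offset);
      int size = cis.readRawVarint32();      // the varint length prefix
      offset += cis.getTotalBytesRead();     // advance past the prefix
      Message.Builder builder = prototype.newBuilderForType();
      builder.mergeFrom(buf, offset, size);  // consume exactly 'size' bytes
      return builder.build();
    }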
@@ -184,6 +184,7 @@ import com.google.protobuf.Service;
@InterfaceAudience.Private
public class HRegion implements HeapSize { // , Writable{
  public static final Log LOG = LogFactory.getLog(HRegion.class);
  private static final String MERGEDIR = ".merges";

  public static final String LOAD_CFS_ON_DEMAND_CONFIG_KEY =
    "hbase.hregion.scan.loadColumnFamiliesOnDemand";
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -10,158 +10,54 @@ public final class TableListMessage {
  }
  public interface TableListOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated string name = 1;
    /**
     * <code>repeated string name = 1;</code>
     */
    java.util.List<java.lang.String>
    getNameList();
    /**
     * <code>repeated string name = 1;</code>
     */
    java.util.List<String> getNameList();
    int getNameCount();
    /**
     * <code>repeated string name = 1;</code>
     */
    java.lang.String getName(int index);
    /**
     * <code>repeated string name = 1;</code>
     */
    com.google.protobuf.ByteString
        getNameBytes(int index);
    String getName(int index);
  }
  /**
   * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableList}
   */
  public static final class TableList extends
      com.google.protobuf.GeneratedMessage
      implements TableListOrBuilder {
    // Use TableList.newBuilder() to construct.
    private TableList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    private TableList(Builder builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private TableList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
    private TableList(boolean noInit) {}

    private static final TableList defaultInstance;
    public static TableList getDefaultInstance() {
      return defaultInstance;
    }

    public TableList getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    private TableList(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                name_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000001;
              }
              name_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          name_ = new com.google.protobuf.UnmodifiableLazyStringList(name_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
      return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
    }

    public static com.google.protobuf.Parser<TableList> PARSER =
        new com.google.protobuf.AbstractParser<TableList>() {
      public TableList parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TableList(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TableList> getParserForType() {
      return PARSER;
    }

    // repeated string name = 1;
    public static final int NAME_FIELD_NUMBER = 1;
    private com.google.protobuf.LazyStringList name_;
    /**
     * <code>repeated string name = 1;</code>
     */
    public java.util.List<java.lang.String>
    public java.util.List<String>
        getNameList() {
      return name_;
    }
    /**
     * <code>repeated string name = 1;</code>
     */
    public int getNameCount() {
      return name_.size();
    }
    /**
     * <code>repeated string name = 1;</code>
     */
    public java.lang.String getName(int index) {
    public String getName(int index) {
      return name_.get(index);
    }
    /**
     * <code>repeated string name = 1;</code>
     */
    public com.google.protobuf.ByteString
        getNameBytes(int index) {
      return name_.getByteString(index);
    }

    private void initFields() {
      name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
@@ -169,11 +65,11 @@ public final class TableListMessage {
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();
@@ -182,12 +78,12 @@ public final class TableListMessage {
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
@@ -202,83 +98,94 @@ public final class TableListMessage {
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
      return newBuilder().mergeFrom(data, extensionRegistry)
          .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
      return newBuilder().mergeFrom(data, extensionRegistry)
          .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
      return newBuilder().mergeFrom(input, extensionRegistry)
          .buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
      return newBuilder().mergeFrom(input, extensionRegistry)
          .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableList}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableListOrBuilder {
@@ -286,21 +193,18 @@ public final class TableListMessage {
          getDescriptor() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }

@@ -311,27 +215,27 @@ public final class TableListMessage {
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDescriptor();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList build() {
        org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result = buildPartial();
        if (!result.isInitialized()) {
@@ -339,7 +243,17 @@ public final class TableListMessage {
        }
        return result;
      }

      private org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
              result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildPartial() {
        org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList(this);
        int from_bitField0_ = bitField0_;
@@ -352,7 +266,7 @@ public final class TableListMessage {
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList) {
          return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList)other);
@@ -361,7 +275,7 @@ public final class TableListMessage {
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList other) {
        if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDefaultInstance()) return this;
        if (!other.name_.isEmpty()) {
@@ -377,30 +291,45 @@ public final class TableListMessage {
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              ensureNameIsMutable();
              name_.add(input.readBytes());
              break;
            }
          }
        }
        return this;
      }

      private int bitField0_;

      // repeated string name = 1;
      private com.google.protobuf.LazyStringList name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      private void ensureNameIsMutable() {
@@ -409,37 +338,18 @@ public final class TableListMessage {
          bitField0_ |= 0x00000001;
        }
      }
      /**
       * <code>repeated string name = 1;</code>
       */
      public java.util.List<java.lang.String>
      public java.util.List<String>
          getNameList() {
        return java.util.Collections.unmodifiableList(name_);
      }
      /**
       * <code>repeated string name = 1;</code>
       */
      public int getNameCount() {
        return name_.size();
      }
      /**
       * <code>repeated string name = 1;</code>
       */
      public java.lang.String getName(int index) {
      public String getName(int index) {
        return name_.get(index);
      }
      /**
       * <code>repeated string name = 1;</code>
       */
      public com.google.protobuf.ByteString
          getNameBytes(int index) {
        return name_.getByteString(index);
      }
      /**
       * <code>repeated string name = 1;</code>
       */
      public Builder setName(
          int index, java.lang.String value) {
          int index, String value) {
        if (value == null) {
          throw new NullPointerException();
        }
@@ -448,11 +358,7 @@ public final class TableListMessage {
        onChanged();
        return this;
      }
      /**
       * <code>repeated string name = 1;</code>
       */
      public Builder addName(
          java.lang.String value) {
      public Builder addName(String value) {
        if (value == null) {
          throw new NullPointerException();
        }
@@ -461,56 +367,42 @@ public final class TableListMessage {
        onChanged();
        return this;
      }
      /**
       * <code>repeated string name = 1;</code>
       */
      public Builder addAllName(
          java.lang.Iterable<java.lang.String> values) {
          java.lang.Iterable<String> values) {
        ensureNameIsMutable();
        super.addAll(values, name_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string name = 1;</code>
       */
      public Builder clearName() {
        name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string name = 1;</code>
       */
      public Builder addNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureNameIsMutable();
      void addName(com.google.protobuf.ByteString value) {
        ensureNameIsMutable();
        name_.add(value);
        onChanged();
        return this;
      }
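The builder methods above are the normal way a repeated string field is populated in this API. A short round-trip sketch using the addName(String) method shown in the diff; the table names themselves are placeholders:

    // Build, serialize, and re-parse a TableList with the restored 2.4.1 API.
    TableListMessage.TableList tables = TableListMessage.TableList.newBuilder()
        .addName("t1")
        .addName("t2")
        .build();
    byte[] wire = tables.toByteArray();
    TableListMessage.TableList roundTrip =
        TableListMessage.TableList.newBuilder().mergeFrom(wire).build();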
      // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableList)
    }

    static {
      defaultInstance = new TableList(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableList)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
@@ -533,7 +425,9 @@ public final class TableListMessage {
          internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor,
              new java.lang.String[] { "Name", });
              new java.lang.String[] { "Name", },
              org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class,
              org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
          return null;
        }
      };
@@ -542,6 +436,6 @@ public final class TableListMessage {
      new com.google.protobuf.Descriptors.FileDescriptor[] {
      }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -8,45 +8,28 @@ public final class TestRpcServiceProtos {
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Protobuf service {@code TestProtobufRpcProto}
   *
   * <pre>
   **
   * A protobuf service for use in tests
   * </pre>
   */
  public static abstract class TestProtobufRpcProto
      implements com.google.protobuf.Service {
    protected TestProtobufRpcProto() {}

    public interface Interface {
      /**
       * <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
       */
      public abstract void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
       */
      public abstract void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);

      /**
       * <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
       */
      public abstract void error(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);

    }

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new TestProtobufRpcProto() {
@@ -57,7 +40,7 @@ public final class TestRpcServiceProtos {
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

        @java.lang.Override
        public void echo(
            com.google.protobuf.RpcController controller,
@@ -65,7 +48,7 @@ public final class TestRpcServiceProtos {
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done) {
          impl.echo(controller, request, done);
        }

        @java.lang.Override
        public void error(
            com.google.protobuf.RpcController controller,
@@ -73,10 +56,10 @@ public final class TestRpcServiceProtos {
            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
          impl.error(controller, request, done);
        }

      };
    }
|
||||
|
||||
|
||||
public static com.google.protobuf.BlockingService
|
||||
newReflectiveBlockingService(final BlockingInterface impl) {
|
||||
return new com.google.protobuf.BlockingService() {
|
||||
|
@ -84,7 +67,7 @@ public final class TestRpcServiceProtos {
|
|||
getDescriptorForType() {
|
||||
return getDescriptor();
|
||||
}
|
||||
|
||||
|
||||
public final com.google.protobuf.Message callBlockingMethod(
|
||||
com.google.protobuf.Descriptors.MethodDescriptor method,
|
||||
com.google.protobuf.RpcController controller,
|
||||
|
@ -106,7 +89,7 @@ public final class TestRpcServiceProtos {
|
|||
throw new java.lang.AssertionError("Can't get here.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public final com.google.protobuf.Message
|
||||
getRequestPrototype(
|
||||
com.google.protobuf.Descriptors.MethodDescriptor method) {
|
||||
|
@ -126,7 +109,7 @@ public final class TestRpcServiceProtos {
|
|||
throw new java.lang.AssertionError("Can't get here.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public final com.google.protobuf.Message
|
||||
getResponsePrototype(
|
||||
com.google.protobuf.Descriptors.MethodDescriptor method) {
|
||||
|
@ -146,34 +129,25 @@ public final class TestRpcServiceProtos {
|
|||
throw new java.lang.AssertionError("Can't get here.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
|
||||
*/
|
||||
|
||||
public abstract void ping(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
|
||||
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
|
||||
|
||||
/**
|
||||
* <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
|
||||
*/
|
||||
|
||||
public abstract void echo(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
|
||||
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);
|
||||
|
||||
/**
|
||||
* <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
|
||||
*/
|
||||
|
||||
public abstract void error(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
|
||||
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
|
||||
|
||||
|
||||
public static final
|
||||
com.google.protobuf.Descriptors.ServiceDescriptor
|
||||
getDescriptor() {
|
||||
|
@ -183,7 +157,7 @@ public final class TestRpcServiceProtos {
|
|||
getDescriptorForType() {
|
||||
return getDescriptor();
|
||||
}
|
||||
|
||||
|
||||
public final void callMethod(
|
||||
com.google.protobuf.Descriptors.MethodDescriptor method,
|
||||
com.google.protobuf.RpcController controller,
|
||||
|
@ -215,7 +189,7 @@ public final class TestRpcServiceProtos {
|
|||
throw new java.lang.AssertionError("Can't get here.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public final com.google.protobuf.Message
|
||||
getRequestPrototype(
|
||||
com.google.protobuf.Descriptors.MethodDescriptor method) {
|
||||
|
@ -235,7 +209,7 @@ public final class TestRpcServiceProtos {
|
|||
throw new java.lang.AssertionError("Can't get here.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public final com.google.protobuf.Message
|
||||
getResponsePrototype(
|
||||
com.google.protobuf.Descriptors.MethodDescriptor method) {
|
||||
|
@ -255,23 +229,23 @@ public final class TestRpcServiceProtos {
|
|||
throw new java.lang.AssertionError("Can't get here.");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static Stub newStub(
|
||||
com.google.protobuf.RpcChannel channel) {
|
||||
return new Stub(channel);
|
||||
}
|
||||
|
||||
|
||||
public static final class Stub extends org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto implements Interface {
|
||||
private Stub(com.google.protobuf.RpcChannel channel) {
|
||||
this.channel = channel;
|
||||
}
|
||||
|
||||
|
||||
private final com.google.protobuf.RpcChannel channel;
|
||||
|
||||
|
||||
public com.google.protobuf.RpcChannel getChannel() {
|
||||
return channel;
|
||||
}
|
||||
|
||||
|
||||
public void ping(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
|
||||
|
@ -286,7 +260,7 @@ public final class TestRpcServiceProtos {
|
|||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()));
|
||||
}
|
||||
|
||||
|
||||
public void echo(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
|
||||
|
@ -301,7 +275,7 @@ public final class TestRpcServiceProtos {
|
|||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.class,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance()));
|
||||
}
|
||||
|
||||
|
||||
public void error(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
|
||||
|
@ -317,36 +291,36 @@ public final class TestRpcServiceProtos {
|
|||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static BlockingInterface newBlockingStub(
|
||||
com.google.protobuf.BlockingRpcChannel channel) {
|
||||
return new BlockingStub(channel);
|
||||
}
|
||||
|
||||
|
||||
public interface BlockingInterface {
|
||||
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto ping(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
|
||||
throws com.google.protobuf.ServiceException;
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto echo(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request)
|
||||
throws com.google.protobuf.ServiceException;
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto error(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
|
||||
throws com.google.protobuf.ServiceException;
|
||||
}
|
||||
|
||||
|
||||
private static final class BlockingStub implements BlockingInterface {
|
||||
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
|
||||
this.channel = channel;
|
||||
}
|
||||
|
||||
|
||||
private final com.google.protobuf.BlockingRpcChannel channel;
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto ping(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
|
||||
|
@ -357,8 +331,8 @@ public final class TestRpcServiceProtos {
|
|||
request,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance());
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto echo(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request)
|
||||
|
@ -369,8 +343,8 @@ public final class TestRpcServiceProtos {
|
|||
request,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance());
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto error(
|
||||
com.google.protobuf.RpcController controller,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
|
||||
|
@ -381,13 +355,11 @@ public final class TestRpcServiceProtos {
|
|||
request,
|
||||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance());
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
// @@protoc_insertion_point(class_scope:TestProtobufRpcProto)
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
public static com.google.protobuf.Descriptors.FileDescriptor
|
||||
getDescriptor() {
|
||||
return descriptor;
|
||||
|
@ -419,6 +391,6 @@ public final class TestRpcServiceProtos {
|
|||
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.getDescriptor(),
|
||||
}, assigner);
|
||||
}
|
||||
|
||||
|
||||
// @@protoc_insertion_point(outer_class_scope)
|
||||
}
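
To make the service plumbing above concrete, here is a minimal sketch of wiring a BlockingInterface implementation through newReflectiveBlockingService and dispatching a call by MethodDescriptor, which is what callBlockingMethod() above does on a real server. The string "message" field on the echo protos is an assumption based on the surrounding generated code; HBase's actual RPC channel plumbing is not shown:

    import com.google.protobuf.BlockingService;
    import com.google.protobuf.RpcController;
    import com.google.protobuf.ServiceException;
    import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto;
    import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto;
    import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto;
    import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto;
    import org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto;

    public class TestServiceSketch {
      // A trivial server-side implementation of the blocking interface.
      static class Impl implements TestProtobufRpcProto.BlockingInterface {
        public EmptyResponseProto ping(RpcController c, EmptyRequestProto req) {
          return EmptyResponseProto.getDefaultInstance();
        }
        public EchoResponseProto echo(RpcController c, EchoRequestProto req) {
          // Echo the payload back; setMessage/getMessage assume the
          // "message" string field of the test .proto definition.
          return EchoResponseProto.newBuilder().setMessage(req.getMessage()).build();
        }
        public EmptyResponseProto error(RpcController c, EmptyRequestProto req)
            throws ServiceException {
          throw new ServiceException("error() always fails");
        }
      }

      public static void main(String[] args) throws Exception {
        // Wrap the implementation so an RPC server can dispatch to it by
        // MethodDescriptor, as the generated callBlockingMethod() does.
        BlockingService service =
            TestProtobufRpcProto.newReflectiveBlockingService(new Impl());

        EchoRequestProto req = EchoRequestProto.newBuilder().setMessage("hello").build();
        EchoResponseProto resp = (EchoResponseProto) service.callBlockingMethod(
            service.getDescriptorForType().findMethodByName("echo"), null, req);
        System.out.println(resp.getMessage()); // prints "hello"
      }
    }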

@@ -1,27 +0,0 @@
These are the protobuf definition files used by tests. The produced java
classes are generated into src/test/java/org/apache/hadoop/hbase/protobuf/generated
and then checked in. The reasoning is that they change infrequently.

To regenerate the classes after making definition file changes, ensure first that
the protobuf protoc tool is in your $PATH (you may need to download it and build
it first; it's part of the protobuf package obtainable from here:
http://code.google.com/p/protobuf/downloads/list). Then run the following (you
should be able to just copy and paste the below into a terminal and hit return
-- the protoc compiler runs fast):

  UNIX_PROTO_DIR=src/test/protobuf
  JAVA_DIR=src/test/java/
  mkdir -p $JAVA_DIR 2> /dev/null
  if which cygpath 2> /dev/null; then
    PROTO_DIR=`cygpath --windows $UNIX_PROTO_DIR`
    JAVA_DIR=`cygpath --windows $JAVA_DIR`
  else
    PROTO_DIR=$UNIX_PROTO_DIR
  fi
  for PROTO_FILE in $UNIX_PROTO_DIR/*.proto
  do
    protoc -I$PROTO_DIR --java_out=$JAVA_DIR $PROTO_FILE
  done

After you've done the above, check it in (or post a patch on a JIRA with your
definition file changes and the generated files).

pom.xml
@@ -891,7 +891,7 @@
      <slf4j.version>1.4.3</slf4j.version>
      <log4j.version>1.2.17</log4j.version>
      <mockito-all.version>1.9.0</mockito-all.version>
      <protobuf.version>2.5.0</protobuf.version>
      <protobuf.version>2.4.1</protobuf.version>
      <stax-api.version>1.0.1</stax-api.version>
      <thrift.version>0.9.0</thrift.version>
      <zookeeper.version>3.4.5</zookeeper.version>
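
This single-line version pin is the substance of the revert: everything else in the commit exists so that the generated classes and their hand-written callers compile against protobuf-java 2.4.1. A hedged sketch of the portable parse pattern used throughout, reusing the generated TableList class from earlier in this diff:

    import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList;

    public class PortableParse {
      public static TableList parse(byte[] data, int offset, int len) throws Exception {
        // Compiles against both protobuf-java 2.4.1 and 2.5:
        return TableList.newBuilder().mergeFrom(data, offset, len).build();

        // The 2.5-only equivalent that this commit backs out; it does not
        // compile against protobuf-java 2.4.1 because generated classes
        // there carry no static PARSER field:
        // return TableList.PARSER.parseFrom(data, offset, len);
      }
    }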