HBASE-8165 Move to Hadoop 2.1.0-beta from 2.0.x-alpha (WAS: Update our protobuf to 2.5 from 2.4.1)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1516084 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2013-08-21 05:04:20 +00:00
parent 5bbe4dbc0d
commit d663f2baa1
50 changed files with 91692 additions and 48275 deletions
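The bulk of this diff is mechanical: protobuf 2.5 regenerates every message class with a static PARSER field, and hand-written callers switch from builder-based parsing to it. A minimal before/after sketch, shown with the ClusterId message regenerated later in this commit:

import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId;
import com.google.protobuf.InvalidProtocolBufferException;

public class ParseStyles {
  // protobuf 2.4 style: merge bytes into a fresh builder, then build.
  static ClusterId parseOld(byte[] data) throws InvalidProtocolBufferException {
    return ClusterId.newBuilder().mergeFrom(data).build();
  }

  // protobuf 2.5 style: every generated class exposes a static PARSER.
  static ClusterId parseNew(byte[] data) throws InvalidProtocolBufferException {
    return ClusterId.PARSER.parseFrom(data);
  }
}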

View File: ServerName.java

@ -345,7 +345,7 @@ public class ServerName implements Comparable<ServerName>, Serializable {
int prefixLen = ProtobufUtil.lengthOfPBMagic();
try {
MetaRegionServer rss =
MetaRegionServer.newBuilder().mergeFrom(data, prefixLen, data.length - prefixLen).build();
MetaRegionServer.PARSER.parseFrom(data, prefixLen, data.length - prefixLen);
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName sn = rss.getServer();
return new ServerName(sn.getHostName(), sn.getPort(), sn.getStartCode());
} catch (InvalidProtocolBufferException e) {
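The ServerName hunk above swaps builder merging for the new offset/length Parser overload, parsing the znode payload in place after the PB magic prefix. A sketch of the same pattern, assuming MetaRegionServer comes from the generated ZooKeeperProtos outer class:

import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer;
import com.google.protobuf.InvalidProtocolBufferException;

public class SliceParse {
  // Skip the PB magic prefix, then parse the remainder without copying:
  // Parser.parseFrom(byte[], int off, int len) reads the slice directly.
  static MetaRegionServer parseAfterMagic(byte[] data, int prefixLen)
      throws InvalidProtocolBufferException {
    return MetaRegionServer.PARSER.parseFrom(data, prefixLen, data.length - prefixLen);
  }
}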

View File: ProtobufUtil.java

@ -132,6 +132,7 @@ import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import com.google.protobuf.RpcChannel;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
@ -2055,17 +2056,19 @@ public final class ProtobufUtil {
}
public static ScanMetrics toScanMetrics(final byte[] bytes) {
MapReduceProtos.ScanMetrics.Builder builder = MapReduceProtos.ScanMetrics.newBuilder();
Parser<MapReduceProtos.ScanMetrics> parser = MapReduceProtos.ScanMetrics.PARSER;
MapReduceProtos.ScanMetrics pScanMetrics = null;
try {
builder.mergeFrom(bytes);
pScanMetrics = parser.parseFrom(bytes);
} catch (InvalidProtocolBufferException e) {
// Ignored: there are just no key values to add.
}
MapReduceProtos.ScanMetrics pScanMetrics = builder.build();
ScanMetrics scanMetrics = new ScanMetrics();
for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
if (pair.hasName() && pair.hasValue()) {
scanMetrics.setCounter(pair.getName(), pair.getValue());
if (pScanMetrics != null) {
for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
if (pair.hasName() && pair.hasValue()) {
scanMetrics.setCounter(pair.getName(), pair.getValue());
}
}
}
return scanMetrics;
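Since the hunk above interleaves removed and added lines, here is the post-change toScanMetrics reassembled as a best-effort reading of the diff (import paths assumed): a parse failure now leaves pScanMetrics null instead of building from a half-merged builder, and null maps to an empty ScanMetrics.

import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Parser;

public class ToScanMetricsSketch {
  public static ScanMetrics toScanMetrics(final byte[] bytes) {
    Parser<MapReduceProtos.ScanMetrics> parser = MapReduceProtos.ScanMetrics.PARSER;
    MapReduceProtos.ScanMetrics pScanMetrics = null;
    try {
      pScanMetrics = parser.parseFrom(bytes);
    } catch (InvalidProtocolBufferException e) {
      // Ignored: there are just no key values to add.
    }
    ScanMetrics scanMetrics = new ScanMetrics();
    if (pScanMetrics != null) {
      for (HBaseProtos.NameInt64Pair pair : pScanMetrics.getMetricsList()) {
        if (pair.hasName() && pair.hasValue()) {
          scanMetrics.setCounter(pair.getName(), pair.getValue());
        }
      }
    }
    return scanMetrics;
  }
}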

View File: RequestConverter.java

@ -824,24 +824,34 @@ public final class RequestConverter {
return builder.build();
}
/**
* @see {@link #buildRollWALWriterRequest()}
*/
private static RollWALWriterRequest ROLL_WAL_WRITER_REQUEST =
RollWALWriterRequest.newBuilder().build();
/**
* Create a new RollWALWriterRequest
*
* @return a RollWALWriterRequest
*/
public static RollWALWriterRequest buildRollWALWriterRequest() {
RollWALWriterRequest.Builder builder = RollWALWriterRequest.newBuilder();
return builder.build();
return ROLL_WAL_WRITER_REQUEST;
}
/**
* @see {@link #buildGetServerInfoRequest()}
*/
private static GetServerInfoRequest GET_SERVER_INFO_REQUEST =
GetServerInfoRequest.newBuilder().build();
/**
* Create a new GetServerInfoRequest
*
* @return a GetServerInfoRequest
*/
public static GetServerInfoRequest buildGetServerInfoRequest() {
GetServerInfoRequest.Builder builder = GetServerInfoRequest.newBuilder();
return builder.build();
return GET_SERVER_INFO_REQUEST;
}
/**
@ -1157,21 +1167,33 @@ public final class RequestConverter {
return SetBalancerRunningRequest.newBuilder().setOn(on).setSynchronous(synchronous).build();
}
/**
* @see {@link #buildGetClusterStatusRequest}
*/
private static final GetClusterStatusRequest GET_CLUSTER_STATUS_REQUEST =
GetClusterStatusRequest.newBuilder().build();
/**
* Creates a protocol buffer GetClusterStatusRequest
*
* @return A GetClusterStatusRequest
*/
public static GetClusterStatusRequest buildGetClusterStatusRequest() {
return GetClusterStatusRequest.newBuilder().build();
return GET_CLUSTER_STATUS_REQUEST;
}
/**
* @see {@link #buildCatalogScanRequest}
*/
private static final CatalogScanRequest CATALOG_SCAN_REQUEST =
CatalogScanRequest.newBuilder().build();
/**
* Creates a request for running a catalog scan
* @return A {@link CatalogScanRequest}
*/
public static CatalogScanRequest buildCatalogScanRequest() {
return CatalogScanRequest.newBuilder().build();
return CATALOG_SCAN_REQUEST;
}
/**
@ -1182,12 +1204,18 @@ public final class RequestConverter {
return EnableCatalogJanitorRequest.newBuilder().setEnable(enable).build();
}
/**
* @see {@link #buildIsCatalogJanitorEnabledRequest()}
*/
private static final IsCatalogJanitorEnabledRequest IS_CATALOG_JANITOR_ENABLED_REQUEST =
IsCatalogJanitorEnabledRequest.newBuilder().build();
/**
* Creates a request for querying the master whether the catalog janitor is enabled
* @return An {@link IsCatalogJanitorEnabledRequest}
*/
public static IsCatalogJanitorEnabledRequest buildIsCatalogJanitorEnabledRequest() {
return IsCatalogJanitorEnabledRequest.newBuilder().build();
return IS_CATALOG_JANITOR_ENABLED_REQUEST;
}
/**
@ -1413,4 +1441,4 @@ public final class RequestConverter {
}
return builder.build();
}
}
}
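The pattern running through these RequestConverter hunks: a parameterless request message is immutable once built, so a single pre-built static instance can be handed out on every call instead of allocating a builder each time. A sketch using RollWALWriterRequest (outer class AdminProtos assumed):

import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest;

public class CachedRequests {
  private static final RollWALWriterRequest ROLL_WAL_WRITER_REQUEST =
      RollWALWriterRequest.newBuilder().build();

  public static RollWALWriterRequest buildRollWALWriterRequest() {
    return ROLL_WAL_WRITER_REQUEST;  // no per-call builder allocation
  }
}

Sharing the instance across threads is safe precisely because generated protobuf messages are immutable.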

View File: HBaseCommonTestingUtility.java

@ -27,7 +27,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
@ -90,13 +89,24 @@ public class HBaseCommonTestingUtility {
}
String randomStr = UUID.randomUUID().toString();
Path testPath= new Path(getBaseTestDir(), randomStr);
Path testPath = new Path(getBaseTestDir(), randomStr);
this.dataTestDir = new File(testPath.toString()).getAbsoluteFile();
this.dataTestDir.deleteOnExit();
// Set this property so if mapreduce jobs run, they will use this as their home dir.
System.setProperty("test.build.dir", this.dataTestDir.toString());
if (deleteOnExit()) this.dataTestDir.deleteOnExit();
return testPath;
}
/**
* @return True if we should delete testing dirs on exit.
*/
boolean deleteOnExit() {
String v = System.getProperty("hbase.testing.preserve.testdir");
// Let default be true, to delete on exit.
return v == null ? true : !Boolean.parseBoolean(v);
}
/**
* @return True if we removed the test dirs
* @throws IOException
@ -146,7 +156,7 @@ public class HBaseCommonTestingUtility {
return true;
}
try {
FileUtils.deleteDirectory(dir);
if (deleteOnExit()) FileUtils.deleteDirectory(dir);
return true;
} catch (IOException ex) {
LOG.warn("Failed to delete " + dir.getAbsolutePath());
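The new deleteOnExit() gate reads hbase.testing.preserve.testdir with delete-by-default semantics: an unset property deletes, and only an explicit true preserves. A self-contained sketch of that truth table:

public class PreserveFlagDemo {
  // Mirrors the deleteOnExit() helper above: unset -> delete; "true" -> preserve.
  static boolean shouldDelete(String v) {
    return v == null ? true : !Boolean.parseBoolean(v);
  }

  public static void main(String[] args) {
    System.out.println(shouldDelete(null));    // true  (default: delete)
    System.out.println(shouldDelete("true"));  // false (preserve test dirs)
    System.out.println(shouldDelete("false")); // true
  }
}

So running tests with -Dhbase.testing.preserve.testdir=true keeps the data test dirs around for post-mortem inspection.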

View File: ClusterIdProtos.java

@ -10,72 +10,191 @@ public final class ClusterIdProtos {
}
public interface ClusterIdOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// required string cluster_id = 1;
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
boolean hasClusterId();
String getClusterId();
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
java.lang.String getClusterId();
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
com.google.protobuf.ByteString
getClusterIdBytes();
}
/**
* Protobuf type {@code ClusterId}
*
* <pre>
**
* Content of the '/hbase/hbaseid', cluster id, znode.
* Also content of the ${HBASE_ROOTDIR}/hbase.id file.
* </pre>
*/
public static final class ClusterId extends
com.google.protobuf.GeneratedMessage
implements ClusterIdOrBuilder {
// Use ClusterId.newBuilder() to construct.
private ClusterId(Builder builder) {
private ClusterId(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ClusterId(boolean noInit) {}
private ClusterId(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ClusterId defaultInstance;
public static ClusterId getDefaultInstance() {
return defaultInstance;
}
public ClusterId getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ClusterId(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
clusterId_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
}
public static com.google.protobuf.Parser<ClusterId> PARSER =
new com.google.protobuf.AbstractParser<ClusterId>() {
public ClusterId parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ClusterId(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ClusterId> getParserForType() {
return PARSER;
}
private int bitField0_;
// required string cluster_id = 1;
public static final int CLUSTER_ID_FIELD_NUMBER = 1;
private java.lang.Object clusterId_;
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public boolean hasClusterId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public String getClusterId() {
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public java.lang.String getClusterId() {
java.lang.Object ref = clusterId_;
if (ref instanceof String) {
return (String) ref;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
String s = bs.toStringUtf8();
if (com.google.protobuf.Internal.isValidUtf8(bs)) {
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
clusterId_ = s;
}
return s;
}
}
private com.google.protobuf.ByteString getClusterIdBytes() {
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public com.google.protobuf.ByteString
getClusterIdBytes() {
java.lang.Object ref = clusterId_;
if (ref instanceof String) {
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((String) ref);
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clusterId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
clusterId_ = "";
}
@ -83,7 +202,7 @@ public final class ClusterIdProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
if (!hasClusterId()) {
memoizedIsInitialized = 0;
return false;
@ -91,7 +210,7 @@ public final class ClusterIdProtos {
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@ -100,12 +219,12 @@ public final class ClusterIdProtos {
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@ -115,14 +234,14 @@ public final class ClusterIdProtos {
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@ -132,7 +251,7 @@ public final class ClusterIdProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) obj;
boolean result = true;
result = result && (hasClusterId() == other.hasClusterId());
if (hasClusterId()) {
@ -143,9 +262,13 @@ public final class ClusterIdProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasClusterId()) {
@ -153,89 +276,85 @@ public final class ClusterIdProtos {
hash = (53 * hash) + getClusterId().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code ClusterId}
*
* <pre>
**
* Content of the '/hbase/hbaseid', cluster id, znode.
* Also content of the ${HBASE_ROOTDIR}/hbase.id file.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder {
@ -243,18 +362,21 @@ public final class ClusterIdProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -265,27 +387,27 @@ public final class ClusterIdProtos {
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
clusterId_ = "";
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.internal_static_ClusterId_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId build() {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial();
if (!result.isInitialized()) {
@ -293,17 +415,7 @@ public final class ClusterIdProtos {
}
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId result = new org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId(this);
int from_bitField0_ = bitField0_;
@ -316,7 +428,7 @@ public final class ClusterIdProtos {
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId)other);
@ -325,16 +437,18 @@ public final class ClusterIdProtos {
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) return this;
if (other.hasClusterId()) {
setClusterId(other.getClusterId());
bitField0_ |= 0x00000001;
clusterId_ = other.clusterId_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasClusterId()) {
@ -342,57 +456,85 @@ public final class ClusterIdProtos {
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
bitField0_ |= 0x00000001;
clusterId_ = input.readBytes();
break;
}
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// required string cluster_id = 1;
private java.lang.Object clusterId_ = "";
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public boolean hasClusterId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public String getClusterId() {
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public java.lang.String getClusterId() {
java.lang.Object ref = clusterId_;
if (!(ref instanceof String)) {
String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
if (!(ref instanceof java.lang.String)) {
java.lang.String s = ((com.google.protobuf.ByteString) ref)
.toStringUtf8();
clusterId_ = s;
return s;
} else {
return (String) ref;
return (java.lang.String) ref;
}
}
public Builder setClusterId(String value) {
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public com.google.protobuf.ByteString
getClusterIdBytes() {
java.lang.Object ref = clusterId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
clusterId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public Builder setClusterId(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
@ -401,35 +543,54 @@ public final class ClusterIdProtos {
onChanged();
return this;
}
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public Builder clearClusterId() {
bitField0_ = (bitField0_ & ~0x00000001);
clusterId_ = getDefaultInstance().getClusterId();
onChanged();
return this;
}
void setClusterId(com.google.protobuf.ByteString value) {
bitField0_ |= 0x00000001;
/**
* <code>required string cluster_id = 1;</code>
*
* <pre>
* This is the cluster id, a uuid as a String
* </pre>
*/
public Builder setClusterIdBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
clusterId_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:ClusterId)
}
static {
defaultInstance = new ClusterId(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:ClusterId)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ClusterId_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ClusterId_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
@ -452,9 +613,7 @@ public final class ClusterIdProtos {
internal_static_ClusterId_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ClusterId_descriptor,
new java.lang.String[] { "ClusterId", },
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.class,
org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder.class);
new java.lang.String[] { "ClusterId", });
return null;
}
};
@ -463,6 +622,6 @@ public final class ClusterIdProtos {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
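With the regenerated class above, the static parseFrom overloads all delegate to PARSER, replacing the old newBuilder().mergeFrom(...).buildParsed() chain. A hedged round-trip sketch:

import org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId;
import com.google.protobuf.InvalidProtocolBufferException;

public class ClusterIdRoundTrip {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    ClusterId id = ClusterId.newBuilder()
        .setClusterId(java.util.UUID.randomUUID().toString())
        .build();
    byte[] wire = id.toByteArray();
    // Post-2.5 entry point; ClusterId.parseFrom(wire) now routes here too.
    ClusterId back = ClusterId.PARSER.parseFrom(wire);
    System.out.println(back.getClusterId());
  }
}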

View File: LoadBalancerProtos.java

@ -10,50 +10,129 @@ public final class LoadBalancerProtos {
}
public interface LoadBalancerStateOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional bool balancer_on = 1;
/**
* <code>optional bool balancer_on = 1;</code>
*/
boolean hasBalancerOn();
/**
* <code>optional bool balancer_on = 1;</code>
*/
boolean getBalancerOn();
}
/**
* Protobuf type {@code LoadBalancerState}
*/
public static final class LoadBalancerState extends
com.google.protobuf.GeneratedMessage
implements LoadBalancerStateOrBuilder {
// Use LoadBalancerState.newBuilder() to construct.
private LoadBalancerState(Builder builder) {
private LoadBalancerState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private LoadBalancerState(boolean noInit) {}
private LoadBalancerState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final LoadBalancerState defaultInstance;
public static LoadBalancerState getDefaultInstance() {
return defaultInstance;
}
public LoadBalancerState getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private LoadBalancerState(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
balancerOn_ = input.readBool();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
}
public static com.google.protobuf.Parser<LoadBalancerState> PARSER =
new com.google.protobuf.AbstractParser<LoadBalancerState>() {
public LoadBalancerState parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new LoadBalancerState(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<LoadBalancerState> getParserForType() {
return PARSER;
}
private int bitField0_;
// optional bool balancer_on = 1;
public static final int BALANCER_ON_FIELD_NUMBER = 1;
private boolean balancerOn_;
/**
* <code>optional bool balancer_on = 1;</code>
*/
public boolean hasBalancerOn() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bool balancer_on = 1;</code>
*/
public boolean getBalancerOn() {
return balancerOn_;
}
private void initFields() {
balancerOn_ = false;
}
@ -61,11 +140,11 @@ public final class LoadBalancerProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@ -74,12 +153,12 @@ public final class LoadBalancerProtos {
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@ -89,14 +168,14 @@ public final class LoadBalancerProtos {
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@ -106,7 +185,7 @@ public final class LoadBalancerProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState other = (org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) obj;
boolean result = true;
result = result && (hasBalancerOn() == other.hasBalancerOn());
if (hasBalancerOn()) {
@ -117,9 +196,13 @@ public final class LoadBalancerProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasBalancerOn()) {
@ -127,89 +210,79 @@ public final class LoadBalancerProtos {
hash = (53 * hash) + hashBoolean(getBalancerOn());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code LoadBalancerState}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerStateOrBuilder {
@ -217,18 +290,21 @@ public final class LoadBalancerProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class, org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -239,27 +315,27 @@ public final class LoadBalancerProtos {
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
balancerOn_ = false;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.internal_static_LoadBalancerState_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState build() {
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial();
if (!result.isInitialized()) {
@ -267,17 +343,7 @@ public final class LoadBalancerProtos {
}
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState result = new org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState(this);
int from_bitField0_ = bitField0_;
@ -290,7 +356,7 @@ public final class LoadBalancerProtos {
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState)other);
@ -299,7 +365,7 @@ public final class LoadBalancerProtos {
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.getDefaultInstance()) return this;
if (other.hasBalancerOn()) {
@ -308,83 +374,80 @@ public final class LoadBalancerProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
balancerOn_ = input.readBool();
break;
}
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional bool balancer_on = 1;
private boolean balancerOn_ ;
/**
* <code>optional bool balancer_on = 1;</code>
*/
public boolean hasBalancerOn() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional bool balancer_on = 1;</code>
*/
public boolean getBalancerOn() {
return balancerOn_;
}
/**
* <code>optional bool balancer_on = 1;</code>
*/
public Builder setBalancerOn(boolean value) {
bitField0_ |= 0x00000001;
balancerOn_ = value;
onChanged();
return this;
}
/**
* <code>optional bool balancer_on = 1;</code>
*/
public Builder clearBalancerOn() {
bitField0_ = (bitField0_ & ~0x00000001);
balancerOn_ = false;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:LoadBalancerState)
}
static {
defaultInstance = new LoadBalancerState(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:LoadBalancerState)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_LoadBalancerState_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_LoadBalancerState_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
@ -408,9 +471,7 @@ public final class LoadBalancerProtos {
internal_static_LoadBalancerState_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_LoadBalancerState_descriptor,
new java.lang.String[] { "BalancerOn", },
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.class,
org.apache.hadoop.hbase.protobuf.generated.LoadBalancerProtos.LoadBalancerState.Builder.class);
new java.lang.String[] { "BalancerOn", });
return null;
}
};
@ -419,6 +480,6 @@ public final class LoadBalancerProtos {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
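One more thing the regeneration buys: the new getParserForType() override makes parsing generic over message types. A sketch of a helper that parses bytes as the same type as a given prototype (the unchecked cast is an assumption that holds for well-behaved generated messages):

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;

public class GenericParse {
  // Parse wire bytes as the same concrete type as defaultInstance.
  static <M extends Message> M parseAs(M defaultInstance, byte[] wire)
      throws InvalidProtocolBufferException {
    @SuppressWarnings("unchecked")
    Parser<M> parser = (Parser<M>) defaultInstance.getParserForType();
    return parser.parseFrom(wire);
  }
}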

View File: MapReduceProtos.java

@ -10,66 +10,169 @@ public final class MapReduceProtos {
}
public interface ScanMetricsOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated .NameInt64Pair metrics = 1;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>
getMetricsList();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index);
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
int getMetricsCount();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList();
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index);
}
/**
* Protobuf type {@code ScanMetrics}
*/
public static final class ScanMetrics extends
com.google.protobuf.GeneratedMessage
implements ScanMetricsOrBuilder {
// Use ScanMetrics.newBuilder() to construct.
private ScanMetrics(Builder builder) {
private ScanMetrics(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private ScanMetrics(boolean noInit) {}
private ScanMetrics(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final ScanMetrics defaultInstance;
public static ScanMetrics getDefaultInstance() {
return defaultInstance;
}
public ScanMetrics getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ScanMetrics(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
metrics_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair>();
mutable_bitField0_ |= 0x00000001;
}
metrics_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.PARSER, extensionRegistry));
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
metrics_ = java.util.Collections.unmodifiableList(metrics_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
}
public static com.google.protobuf.Parser<ScanMetrics> PARSER =
new com.google.protobuf.AbstractParser<ScanMetrics>() {
public ScanMetrics parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ScanMetrics(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<ScanMetrics> getParserForType() {
return PARSER;
}
// repeated .NameInt64Pair metrics = 1;
public static final int METRICS_FIELD_NUMBER = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> metrics_;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
return metrics_;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList() {
return metrics_;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public int getMetricsCount() {
return metrics_.size();
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
return metrics_.get(index);
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index) {
return metrics_.get(index);
}
private void initFields() {
metrics_ = java.util.Collections.emptyList();
}
@ -77,11 +180,11 @@ public final class MapReduceProtos {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@ -90,12 +193,12 @@ public final class MapReduceProtos {
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < metrics_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
@ -105,14 +208,14 @@ public final class MapReduceProtos {
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@ -122,7 +225,7 @@ public final class MapReduceProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics other = (org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) obj;
boolean result = true;
result = result && getMetricsList()
.equals(other.getMetricsList());
@ -130,9 +233,13 @@ public final class MapReduceProtos {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (getMetricsCount() > 0) {
@ -140,89 +247,79 @@ public final class MapReduceProtos {
hash = (53 * hash) + getMetricsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code ScanMetrics}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetricsOrBuilder {
@ -230,18 +327,21 @@ public final class MapReduceProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class, org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -253,7 +353,7 @@ public final class MapReduceProtos {
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
if (metricsBuilder_ == null) {
@ -264,20 +364,20 @@ public final class MapReduceProtos {
}
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.internal_static_ScanMetrics_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics build() {
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = buildPartial();
if (!result.isInitialized()) {
@@ -285,17 +385,7 @@ public final class MapReduceProtos {
}
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics result = new org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics(this);
int from_bitField0_ = bitField0_;
@@ -311,7 +401,7 @@ public final class MapReduceProtos {
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics)other);
@@ -320,7 +410,7 @@ public final class MapReduceProtos {
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.getDefaultInstance()) return this;
if (metricsBuilder_ == null) {
@@ -352,46 +442,30 @@ public final class MapReduceProtos {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addMetrics(subBuilder.buildPartial());
break;
}
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated .NameInt64Pair metrics = 1;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> metrics_ =
java.util.Collections.emptyList();
@@ -401,10 +475,13 @@ public final class MapReduceProtos {
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder> metricsBuilder_;
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> getMetricsList() {
if (metricsBuilder_ == null) {
return java.util.Collections.unmodifiableList(metrics_);
@@ -412,6 +489,9 @@ public final class MapReduceProtos {
return metricsBuilder_.getMessageList();
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public int getMetricsCount() {
if (metricsBuilder_ == null) {
return metrics_.size();
@@ -419,6 +499,9 @@ public final class MapReduceProtos {
return metricsBuilder_.getCount();
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair getMetrics(int index) {
if (metricsBuilder_ == null) {
return metrics_.get(index);
@@ -426,6 +509,9 @@ public final class MapReduceProtos {
return metricsBuilder_.getMessage(index);
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder setMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
if (metricsBuilder_ == null) {
@@ -440,6 +526,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder setMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
if (metricsBuilder_ == null) {
@@ -451,6 +540,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
if (metricsBuilder_ == null) {
if (value == null) {
@@ -464,6 +556,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair value) {
if (metricsBuilder_ == null) {
@@ -478,6 +573,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
if (metricsBuilder_ == null) {
@@ -489,6 +587,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addMetrics(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder builderForValue) {
if (metricsBuilder_ == null) {
@@ -500,6 +601,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder addAllMetrics(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair> values) {
if (metricsBuilder_ == null) {
@@ -511,6 +615,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder clearMetrics() {
if (metricsBuilder_ == null) {
metrics_ = java.util.Collections.emptyList();
@@ -521,6 +628,9 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public Builder removeMetrics(int index) {
if (metricsBuilder_ == null) {
ensureMetricsIsMutable();
@@ -531,10 +641,16 @@ public final class MapReduceProtos {
}
return this;
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder getMetricsBuilder(
int index) {
return getMetricsFieldBuilder().getBuilder(index);
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder getMetricsOrBuilder(
int index) {
if (metricsBuilder_ == null) {
@@ -542,6 +658,9 @@ public final class MapReduceProtos {
return metricsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64PairOrBuilder>
getMetricsOrBuilderList() {
if (metricsBuilder_ != null) {
@@ -550,15 +669,24 @@ public final class MapReduceProtos {
return java.util.Collections.unmodifiableList(metrics_);
}
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder() {
return getMetricsFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder addMetricsBuilder(
int index) {
return getMetricsFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.getDefaultInstance());
}
/**
* <code>repeated .NameInt64Pair metrics = 1;</code>
*/
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameInt64Pair.Builder>
getMetricsBuilderList() {
return getMetricsFieldBuilder().getBuilderList();
@@ -577,24 +705,24 @@ public final class MapReduceProtos {
}
return metricsBuilder_;
}
// @@protoc_insertion_point(builder_scope:ScanMetrics)
}
static {
defaultInstance = new ScanMetrics(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:ScanMetrics)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_ScanMetrics_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_ScanMetrics_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
@@ -618,9 +746,7 @@ public final class MapReduceProtos {
internal_static_ScanMetrics_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ScanMetrics_descriptor,
new java.lang.String[] { "Metrics", },
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.class,
org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics.Builder.class);
new java.lang.String[] { "Metrics", });
return null;
}
};
@@ -630,6 +756,6 @@ public final class MapReduceProtos {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
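Reviewer note: the ScanMetrics hunks above are the mechanical protobuf 2.4.1 to 2.5 migration. The private buildParsed() chain disappears and a generated static PARSER takes over merging plus initialization checks. A minimal caller-side sketch, illustrative only and not part of this commit (the helper class name is hypothetical):

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos.ScanMetrics;

public class ScanMetricsParseSketch {
  // Parse serialized ScanMetrics bytes the protobuf 2.5 way.
  public static ScanMetrics parse(byte[] data) throws InvalidProtocolBufferException {
    // 2.4.1-era generated code routed parseFrom() through
    // newBuilder().mergeFrom(data).buildParsed(); buildParsed() is removed in 2.5.
    // In 2.5 the static PARSER does the merge and the initialization check.
    return ScanMetrics.PARSER.parseFrom(data);
  }
}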


@@ -11,69 +11,130 @@ public final class MultiRowMutationProcessorProtos {
public interface MultiRowMutationProcessorRequestOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MultiRowMutationProcessorRequest}
*/
public static final class MultiRowMutationProcessorRequest extends
com.google.protobuf.GeneratedMessage
implements MultiRowMutationProcessorRequestOrBuilder {
// Use MultiRowMutationProcessorRequest.newBuilder() to construct.
private MultiRowMutationProcessorRequest(Builder builder) {
private MultiRowMutationProcessorRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiRowMutationProcessorRequest(boolean noInit) {}
private MultiRowMutationProcessorRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MultiRowMutationProcessorRequest defaultInstance;
public static MultiRowMutationProcessorRequest getDefaultInstance() {
return defaultInstance;
}
public MultiRowMutationProcessorRequest getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiRowMutationProcessorRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
}
public static com.google.protobuf.Parser<MultiRowMutationProcessorRequest> PARSER =
new com.google.protobuf.AbstractParser<MultiRowMutationProcessorRequest>() {
public MultiRowMutationProcessorRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiRowMutationProcessorRequest(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiRowMutationProcessorRequest> getParserForType() {
return PARSER;
}
private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -83,101 +144,95 @@ public final class MultiRowMutationProcessorProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) obj;
boolean result = true;
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code MultiRowMutationProcessorRequest}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequestOrBuilder {
@@ -185,18 +240,21 @@ public final class MultiRowMutationProcessorProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -207,25 +265,25 @@ public final class MultiRowMutationProcessorProtos {
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorRequest_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest build() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = buildPartial();
if (!result.isInitialized()) {
@@ -233,23 +291,13 @@ public final class MultiRowMutationProcessorProtos {
}
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest(this);
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest)other);
@@ -258,122 +306,173 @@ public final class MultiRowMutationProcessorProtos {
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorRequest)
}
static {
defaultInstance = new MultiRowMutationProcessorRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:MultiRowMutationProcessorRequest)
}
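The replacement Builder.mergeFrom above also surfaces a 2.5 behavior worth noting: a failed parse re-merges whatever was read before rethrowing, and the InvalidProtocolBufferException can carry that partial message. A hedged sketch of how a caller might use this, with a hypothetical helper that is not part of this commit (getUnfinishedMessage() may be null when nothing was attached):

import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest;

public class UnfinishedMessageSketch {
  // Return the fully parsed request, or whatever partial message protobuf 2.5
  // attached to the failure (possibly null).
  public static MultiRowMutationProcessorRequest parseLeniently(byte[] wire) {
    try {
      return MultiRowMutationProcessorRequest.PARSER.parseFrom(wire);
    } catch (InvalidProtocolBufferException e) {
      return (MultiRowMutationProcessorRequest) e.getUnfinishedMessage();
    }
  }
}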
public interface MultiRowMutationProcessorResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
/**
* Protobuf type {@code MultiRowMutationProcessorResponse}
*/
public static final class MultiRowMutationProcessorResponse extends
com.google.protobuf.GeneratedMessage
implements MultiRowMutationProcessorResponseOrBuilder {
// Use MultiRowMutationProcessorResponse.newBuilder() to construct.
private MultiRowMutationProcessorResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private MultiRowMutationProcessorResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final MultiRowMutationProcessorResponse defaultInstance;
public static MultiRowMutationProcessorResponse getDefaultInstance() {
return defaultInstance;
}
public MultiRowMutationProcessorResponse getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private MultiRowMutationProcessorResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorRequest)
}
static {
defaultInstance = new MultiRowMutationProcessorRequest(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:MultiRowMutationProcessorRequest)
}
public interface MultiRowMutationProcessorResponseOrBuilder
extends com.google.protobuf.MessageOrBuilder {
}
public static final class MultiRowMutationProcessorResponse extends
com.google.protobuf.GeneratedMessage
implements MultiRowMutationProcessorResponseOrBuilder {
// Use MultiRowMutationProcessorResponse.newBuilder() to construct.
private MultiRowMutationProcessorResponse(Builder builder) {
super(builder);
}
private MultiRowMutationProcessorResponse(boolean noInit) {}
private static final MultiRowMutationProcessorResponse defaultInstance;
public static MultiRowMutationProcessorResponse getDefaultInstance() {
return defaultInstance;
}
public MultiRowMutationProcessorResponse getDefaultInstanceForType() {
return defaultInstance;
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
}
public static com.google.protobuf.Parser<MultiRowMutationProcessorResponse> PARSER =
new com.google.protobuf.AbstractParser<MultiRowMutationProcessorResponse>() {
public MultiRowMutationProcessorResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new MultiRowMutationProcessorResponse(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<MultiRowMutationProcessorResponse> getParserForType() {
return PARSER;
}
private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -383,101 +482,95 @@ public final class MultiRowMutationProcessorProtos {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) obj;
boolean result = true;
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code MultiRowMutationProcessorResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponseOrBuilder {
@@ -485,18 +578,21 @@ public final class MultiRowMutationProcessorProtos {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class, org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -507,25 +603,25 @@ public final class MultiRowMutationProcessorProtos {
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.internal_static_MultiRowMutationProcessorResponse_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse build() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = buildPartial();
if (!result.isInitialized()) {
@@ -533,23 +629,13 @@ public final class MultiRowMutationProcessorProtos {
}
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse(this);
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse)other);
@@ -558,56 +644,46 @@ public final class MultiRowMutationProcessorProtos {
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
// @@protoc_insertion_point(builder_scope:MultiRowMutationProcessorResponse)
}
static {
defaultInstance = new MultiRowMutationProcessorResponse(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:MultiRowMutationProcessorResponse)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_MultiRowMutationProcessorRequest_descriptor;
private static
@@ -618,7 +694,7 @@ public final class MultiRowMutationProcessorProtos {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
@@ -643,17 +719,13 @@ public final class MultiRowMutationProcessorProtos {
internal_static_MultiRowMutationProcessorRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiRowMutationProcessorRequest_descriptor,
new java.lang.String[] { },
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.class,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest.Builder.class);
new java.lang.String[] { });
internal_static_MultiRowMutationProcessorResponse_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_MultiRowMutationProcessorResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiRowMutationProcessorResponse_descriptor,
new java.lang.String[] { },
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.class,
org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorResponse.Builder.class);
new java.lang.String[] { });
return null;
}
};
@@ -662,6 +734,6 @@ public final class MultiRowMutationProcessorProtos {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
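Both message types in this file keep their stream-oriented entry points, now routed through PARSER. One behavioral detail preserved from 2.4.1: parseDelimitedFrom still yields null at a clean end-of-stream instead of throwing. A self-contained round-trip sketch, illustrative and not part of the commit:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.protobuf.generated.MultiRowMutationProcessorProtos.MultiRowMutationProcessorRequest;

public class DelimitedRoundTripSketch {
  public static void main(String[] args) throws Exception {
    MultiRowMutationProcessorRequest req =
        MultiRowMutationProcessorRequest.getDefaultInstance();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    req.writeDelimitedTo(out);  // length-prefixed write; API unchanged across 2.4/2.5
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    MultiRowMutationProcessorRequest first =
        MultiRowMutationProcessorRequest.parseDelimitedFrom(in);  // the message
    MultiRowMutationProcessorRequest second =
        MultiRowMutationProcessorRequest.parseDelimitedFrom(in);  // null: stream drained
    System.out.println(first != null && second == null);          // expect true
  }
}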


@@ -10,64 +10,168 @@ public final class Tracing {
}
public interface RPCTInfoOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// optional int64 trace_id = 1;
/**
* <code>optional int64 trace_id = 1;</code>
*/
boolean hasTraceId();
/**
* <code>optional int64 trace_id = 1;</code>
*/
long getTraceId();
// optional int64 parent_id = 2;
/**
* <code>optional int64 parent_id = 2;</code>
*/
boolean hasParentId();
/**
* <code>optional int64 parent_id = 2;</code>
*/
long getParentId();
}
/**
* Protobuf type {@code RPCTInfo}
*
* <pre>
*Used to pass through the information necessary to continue
*a trace after an RPC is made. All we need is the traceid
*(so we know the overarching trace this message is a part of), and
*the id of the current span when this message was sent, so we know
*what span caused the new span we will create when this message is received.
* </pre>
*/
public static final class RPCTInfo extends
com.google.protobuf.GeneratedMessage
implements RPCTInfoOrBuilder {
// Use RPCTInfo.newBuilder() to construct.
private RPCTInfo(Builder builder) {
private RPCTInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private RPCTInfo(boolean noInit) {}
private RPCTInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final RPCTInfo defaultInstance;
public static RPCTInfo getDefaultInstance() {
return defaultInstance;
}
public RPCTInfo getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private RPCTInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
traceId_ = input.readInt64();
break;
}
case 16: {
bitField0_ |= 0x00000002;
parentId_ = input.readInt64();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
}
public static com.google.protobuf.Parser<RPCTInfo> PARSER =
new com.google.protobuf.AbstractParser<RPCTInfo>() {
public RPCTInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RPCTInfo(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<RPCTInfo> getParserForType() {
return PARSER;
}
private int bitField0_;
// optional int64 trace_id = 1;
public static final int TRACE_ID_FIELD_NUMBER = 1;
private long traceId_;
/**
* <code>optional int64 trace_id = 1;</code>
*/
public boolean hasTraceId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 trace_id = 1;</code>
*/
public long getTraceId() {
return traceId_;
}
// optional int64 parent_id = 2;
public static final int PARENT_ID_FIELD_NUMBER = 2;
private long parentId_;
/**
* <code>optional int64 parent_id = 2;</code>
*/
public boolean hasParentId() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 parent_id = 2;</code>
*/
public long getParentId() {
return parentId_;
}
private void initFields() {
traceId_ = 0L;
parentId_ = 0L;
@@ -76,11 +180,11 @@ public final class Tracing {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@@ -92,12 +196,12 @@ public final class Tracing {
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
@@ -111,14 +215,14 @@ public final class Tracing {
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
@@ -128,7 +232,7 @@ public final class Tracing {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) obj;
boolean result = true;
result = result && (hasTraceId() == other.hasTraceId());
if (hasTraceId()) {
@@ -144,9 +248,13 @@ public final class Tracing {
getUnknownFields().equals(other.getUnknownFields());
return result;
}
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasTraceId()) {
@@ -158,89 +266,87 @@ public final class Tracing {
hash = (53 * hash) + hashLong(getParentId());
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code RPCTInfo}
*
* <pre>
*Used to pass through the information necessary to continue
*a trace after an RPC is made. All we need is the traceid
*(so we know the overarching trace this message is a part of), and
*the id of the current span when this message was sent, so we know
*what span caused the new span we will create when this message is received.
* </pre>
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder {
@@ -248,18 +354,21 @@ public final class Tracing {
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable;
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class, org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
}
// Construct using org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@@ -270,7 +379,7 @@ public final class Tracing {
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
traceId_ = 0L;
@@ -279,20 +388,20 @@ public final class Tracing {
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDescriptor();
return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
}
public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance();
}
public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo build() {
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial();
if (!result.isInitialized()) {
@@ -300,17 +409,7 @@ public final class Tracing {
}
return result;
}
private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = new org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo(this);
int from_bitField0_ = bitField0_;
@@ -327,7 +426,7 @@ public final class Tracing {
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)other);
@ -336,7 +435,7 @@ public final class Tracing {
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) return this;
if (other.hasTraceId()) {
@ -348,109 +447,113 @@ public final class Tracing {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 8: {
bitField0_ |= 0x00000001;
traceId_ = input.readInt64();
break;
}
case 16: {
bitField0_ |= 0x00000002;
parentId_ = input.readInt64();
break;
}
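        // (2.5 regeneration: the tag-switch loop above is the removed 2.4.1
        // body; the replacement below delegates to PARSER.parsePartialFrom.)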
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// optional int64 trace_id = 1;
private long traceId_ ;
/**
* <code>optional int64 trace_id = 1;</code>
*/
public boolean hasTraceId() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional int64 trace_id = 1;</code>
*/
public long getTraceId() {
return traceId_;
}
/**
* <code>optional int64 trace_id = 1;</code>
*/
public Builder setTraceId(long value) {
bitField0_ |= 0x00000001;
traceId_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 trace_id = 1;</code>
*/
public Builder clearTraceId() {
bitField0_ = (bitField0_ & ~0x00000001);
traceId_ = 0L;
onChanged();
return this;
}
// optional int64 parent_id = 2;
private long parentId_ ;
/**
* <code>optional int64 parent_id = 2;</code>
*/
public boolean hasParentId() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional int64 parent_id = 2;</code>
*/
public long getParentId() {
return parentId_;
}
/**
* <code>optional int64 parent_id = 2;</code>
*/
public Builder setParentId(long value) {
bitField0_ |= 0x00000002;
parentId_ = value;
onChanged();
return this;
}
/**
* <code>optional int64 parent_id = 2;</code>
*/
public Builder clearParentId() {
bitField0_ = (bitField0_ & ~0x00000002);
parentId_ = 0L;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:RPCTInfo)
}
static {
defaultInstance = new RPCTInfo(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:RPCTInfo)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_RPCTInfo_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_RPCTInfo_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
@ -474,9 +577,7 @@ public final class Tracing {
internal_static_RPCTInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_RPCTInfo_descriptor,
new java.lang.String[] { "TraceId", "ParentId", },
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class,
org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
new java.lang.String[] { "TraceId", "ParentId", });
return null;
}
};
@ -485,6 +586,6 @@ public final class Tracing {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
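A minimal sketch of the round trip the regenerated Tracing classes support, assuming RPCTInfo exposes the same public PARSER field as the other 2.5-regenerated messages in this commit (field values are illustrative):

import org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo;

class RPCTInfoRoundTrip {
  static RPCTInfo roundTrip()
      throws com.google.protobuf.InvalidProtocolBufferException {
    RPCTInfo info = RPCTInfo.newBuilder()
        .setTraceId(1234L)   // id of the overarching trace
        .setParentId(5678L)  // id of the span that sent this RPC
        .build();
    // protobuf 2.5 style: parse via the generated PARSER singleton
    // instead of the removed newBuilder().mergeFrom(bytes).buildParsed().
    return RPCTInfo.PARSER.parseFrom(info.toByteArray());
  }
}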


@ -404,19 +404,17 @@ service MasterAdminService {
/**
* Create a snapshot for the given table.
* @param snapshot description of the snapshot to take
*/
rpc Snapshot(TakeSnapshotRequest) returns(TakeSnapshotResponse);
/**
* List completed snapshots.
* Returns a list of snapshot descriptors for completed snapshots
* @return a list of snapshot descriptors for completed snapshots
*/
rpc GetCompletedSnapshots(ListSnapshotRequest) returns(ListSnapshotResponse);
/**
* Delete an existing snapshot. This method can also be used to clean up an aborted snapshot.
* @param snapshotName snapshot to delete
*/
rpc DeleteSnapshot(DeleteSnapshotRequest) returns(DeleteSnapshotResponse);
@ -427,7 +425,6 @@ service MasterAdminService {
/**
* Restore a snapshot
* @param snapshot description of the snapshot to restore
*/
rpc RestoreSnapshot(RestoreSnapshotRequest) returns(RestoreSnapshotResponse);
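A hedged sketch of building the request for the Snapshot rpc above; the MasterAdminProtos outer class and the SnapshotDescription field names are assumptions inferred from this commit's generated-code conventions:

import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.TakeSnapshotRequest;

class TakeSnapshotRequestSketch {
  static TakeSnapshotRequest build() {
    SnapshotDescription snapshot = SnapshotDescription.newBuilder()
        .setName("mySnapshot")  // assumed field: snapshot name
        .setTable("myTable")    // assumed field: table to snapshot
        .build();
    return TakeSnapshotRequest.newBuilder().setSnapshot(snapshot).build();
  }
}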


@ -10,54 +10,158 @@ public final class TableListMessage {
}
public interface TableListOrBuilder
extends com.google.protobuf.MessageOrBuilder {
// repeated string name = 1;
java.util.List<String> getNameList();
/**
* <code>repeated string name = 1;</code>
*/
java.util.List<java.lang.String>
getNameList();
/**
* <code>repeated string name = 1;</code>
*/
int getNameCount();
String getName(int index);
/**
* <code>repeated string name = 1;</code>
*/
java.lang.String getName(int index);
/**
* <code>repeated string name = 1;</code>
*/
com.google.protobuf.ByteString
getNameBytes(int index);
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableList}
*/
public static final class TableList extends
com.google.protobuf.GeneratedMessage
implements TableListOrBuilder {
// Use TableList.newBuilder() to construct.
private TableList(Builder builder) {
private TableList(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private TableList(boolean noInit) {}
private TableList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final TableList defaultInstance;
public static TableList getDefaultInstance() {
return defaultInstance;
}
public TableList getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
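    // New with the 2.5 regeneration: the message parses itself in this
    // constructor, which the PARSER singleton below hands input to.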
private TableList(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
name_ = new com.google.protobuf.LazyStringArrayList();
mutable_bitField0_ |= 0x00000001;
}
name_.add(input.readBytes());
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
name_ = new com.google.protobuf.UnmodifiableLazyStringList(name_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
}
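    // New with the 2.5 regeneration: a public PARSER singleton; the static
    // parseFrom(...) overloads below delegate to it instead of the old
    // newBuilder().mergeFrom(...).buildParsed() chain.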
public static com.google.protobuf.Parser<TableList> PARSER =
new com.google.protobuf.AbstractParser<TableList>() {
public TableList parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TableList(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<TableList> getParserForType() {
return PARSER;
}
// repeated string name = 1;
public static final int NAME_FIELD_NUMBER = 1;
private com.google.protobuf.LazyStringList name_;
public java.util.List<String>
/**
* <code>repeated string name = 1;</code>
*/
public java.util.List<java.lang.String>
getNameList() {
return name_;
}
/**
* <code>repeated string name = 1;</code>
*/
public int getNameCount() {
return name_.size();
}
public String getName(int index) {
/**
* <code>repeated string name = 1;</code>
*/
public java.lang.String getName(int index) {
return name_.get(index);
}
/**
* <code>repeated string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes(int index) {
return name_.getByteString(index);
}
private void initFields() {
name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
}
@ -65,11 +169,11 @@ public final class TableListMessage {
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
@ -78,12 +182,12 @@ public final class TableListMessage {
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
@ -98,94 +202,83 @@ public final class TableListMessage {
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code org.apache.hadoop.hbase.rest.protobuf.generated.TableList}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableListOrBuilder {
@ -193,18 +286,21 @@ public final class TableListMessage {
getDescriptor() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class, org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
}
// Construct using org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(BuilderParent parent) {
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
@ -215,27 +311,27 @@ public final class TableListMessage {
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDescriptor();
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
}
public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList getDefaultInstanceForType() {
return org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDefaultInstance();
}
public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList build() {
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result = buildPartial();
if (!result.isInitialized()) {
@ -243,17 +339,7 @@ public final class TableListMessage {
}
return result;
}
private org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}
public org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList buildPartial() {
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList result = new org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList(this);
int from_bitField0_ = bitField0_;
@ -266,7 +352,7 @@ public final class TableListMessage {
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList) {
return mergeFrom((org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList)other);
@ -275,7 +361,7 @@ public final class TableListMessage {
return this;
}
}
public Builder mergeFrom(org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList other) {
if (other == org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.getDefaultInstance()) return this;
if (!other.name_.isEmpty()) {
@ -291,45 +377,30 @@ public final class TableListMessage {
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
ensureNameIsMutable();
name_.add(input.readBytes());
break;
}
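        // (2.5 regeneration: as in Tracing above, the removed tag-switch loop
        // gives way to a PARSER.parsePartialFrom delegation.)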
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// repeated string name = 1;
private com.google.protobuf.LazyStringList name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
private void ensureNameIsMutable() {
@ -338,18 +409,37 @@ public final class TableListMessage {
bitField0_ |= 0x00000001;
}
}
public java.util.List<String>
/**
* <code>repeated string name = 1;</code>
*/
public java.util.List<java.lang.String>
getNameList() {
return java.util.Collections.unmodifiableList(name_);
}
/**
* <code>repeated string name = 1;</code>
*/
public int getNameCount() {
return name_.size();
}
public String getName(int index) {
/**
* <code>repeated string name = 1;</code>
*/
public java.lang.String getName(int index) {
return name_.get(index);
}
/**
* <code>repeated string name = 1;</code>
*/
public com.google.protobuf.ByteString
getNameBytes(int index) {
return name_.getByteString(index);
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder setName(
int index, String value) {
int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
@ -358,7 +448,11 @@ public final class TableListMessage {
onChanged();
return this;
}
public Builder addName(String value) {
/**
* <code>repeated string name = 1;</code>
*/
public Builder addName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
@ -367,42 +461,56 @@ public final class TableListMessage {
onChanged();
return this;
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder addAllName(
java.lang.Iterable<String> values) {
java.lang.Iterable<java.lang.String> values) {
ensureNameIsMutable();
super.addAll(values, name_);
onChanged();
return this;
}
/**
* <code>repeated string name = 1;</code>
*/
public Builder clearName() {
name_ = com.google.protobuf.LazyStringArrayList.EMPTY;
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
void addName(com.google.protobuf.ByteString value) {
ensureNameIsMutable();
/**
* <code>repeated string name = 1;</code>
*/
public Builder addNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
ensureNameIsMutable();
name_.add(value);
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableList)
}
static {
defaultInstance = new TableList(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:org.apache.hadoop.hbase.rest.protobuf.generated.TableList)
}
private static com.google.protobuf.Descriptors.Descriptor
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
@ -425,9 +533,7 @@ public final class TableListMessage {
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_org_apache_hadoop_hbase_rest_protobuf_generated_TableList_descriptor,
new java.lang.String[] { "Name", },
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.class,
org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList.Builder.class);
new java.lang.String[] { "Name", });
return null;
}
};
@ -436,6 +542,6 @@ public final class TableListMessage {
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
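A minimal sketch of the regenerated TableList in use; everything here is shown in the diff above except toByteArray(), which is the standard protobuf Message method (table names are illustrative):

import org.apache.hadoop.hbase.rest.protobuf.generated.TableListMessage.TableList;

class TableListRoundTrip {
  static void demo()
      throws com.google.protobuf.InvalidProtocolBufferException {
    TableList tables = TableList.newBuilder()
        .addName("usertable")
        .addName("eventlog")
        .build();
    // The static parseFrom overloads now delegate to this PARSER.
    TableList copy = TableList.PARSER.parseFrom(tables.toByteArray());
    for (String name : copy.getNameList()) {
      System.out.println(name);
    }
  }
}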


@ -1,7 +1,4 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@ -120,6 +117,8 @@ import org.apache.zookeeper.ZooKeeper.States;
* Depends on log4j being on classpath and
* hbase-site.xml for logging and test-run configuration. It does not set
* logging levels nor make changes to configuration parameters.
 * <p>To preserve test data directories, set the system property
 * "hbase.testing.preserve.testdir" to true.
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
@ -284,7 +283,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
private void createSubDir(String propertyName, Path parent, String subDirName){
Path newPath= new Path(parent, subDirName);
File newDir = new File(newPath.toString()).getAbsoluteFile();
newDir.deleteOnExit();
if (deleteOnExit()) newDir.deleteOnExit();
conf.set(propertyName, newDir.getAbsolutePath());
}
@ -350,9 +349,10 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
Path testDir = getDataTestDir("dfscluster_" + UUID.randomUUID().toString());
clusterTestDir = new File(testDir.toString()).getAbsoluteFile();
// Have it cleaned up on exit
clusterTestDir.deleteOnExit();
boolean b = deleteOnExit();
if (b) clusterTestDir.deleteOnExit();
conf.set(TEST_DIRECTORY_KEY, clusterTestDir.getPath());
LOG.info("Created new mini-cluster data directory: " + clusterTestDir);
LOG.info("Created new mini-cluster data directory: " + clusterTestDir + ", deleteOnExit=" + b);
}
/**
@ -397,13 +397,13 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
FileSystem fs = getTestFileSystem();
if (fs.getUri().getScheme().equals(FileSystem.getLocal(conf).getUri().getScheme())) {
File dataTestDir = new File(getDataTestDir().toString());
dataTestDir.deleteOnExit();
if (deleteOnExit()) dataTestDir.deleteOnExit();
dataTestDirOnTestFS = new Path(dataTestDir.getAbsolutePath());
} else {
Path base = getBaseTestDirOnTestFS();
String randomStr = UUID.randomUUID().toString();
dataTestDirOnTestFS = new Path(base, randomStr);
fs.deleteOnExit(dataTestDirOnTestFS);
if (deleteOnExit()) fs.deleteOnExit(dataTestDirOnTestFS);
}
}
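The deleteOnExit() gate used above is not shown in this hunk; a sketch of what it presumably does, keyed off the "hbase.testing.preserve.testdir" property documented in the class javadoc:

class DeleteOnExitGateSketch {
  // Assumed behavior: delete test directories on JVM exit unless the user
  // ran with -Dhbase.testing.preserve.testdir=true.
  boolean deleteOnExit() {
    return !Boolean.parseBoolean(
        System.getProperty("hbase.testing.preserve.testdir"));
  }
}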


@ -8,28 +8,45 @@ public final class TestRpcServiceProtos {
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
* Protobuf service {@code TestProtobufRpcProto}
*
* <pre>
**
* A protobuf service for use in tests
* </pre>
*/
public static abstract class TestProtobufRpcProto
implements com.google.protobuf.Service {
protected TestProtobufRpcProto() {}
public interface Interface {
/**
* <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
/**
* <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);
/**
* <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
}
public static com.google.protobuf.Service newReflectiveService(
final Interface impl) {
return new TestProtobufRpcProto() {
@ -40,7 +57,7 @@ public final class TestRpcServiceProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
impl.ping(controller, request, done);
}
@java.lang.Override
public void echo(
com.google.protobuf.RpcController controller,
@ -48,7 +65,7 @@ public final class TestRpcServiceProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done) {
impl.echo(controller, request, done);
}
@java.lang.Override
public void error(
com.google.protobuf.RpcController controller,
@ -56,10 +73,10 @@ public final class TestRpcServiceProtos {
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done) {
impl.error(controller, request, done);
}
};
}
public static com.google.protobuf.BlockingService
newReflectiveBlockingService(final BlockingInterface impl) {
return new com.google.protobuf.BlockingService() {
@ -67,7 +84,7 @@ public final class TestRpcServiceProtos {
getDescriptorForType() {
return getDescriptor();
}
public final com.google.protobuf.Message callBlockingMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
@ -89,7 +106,7 @@ public final class TestRpcServiceProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@ -109,7 +126,7 @@ public final class TestRpcServiceProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@ -129,25 +146,34 @@ public final class TestRpcServiceProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
};
}
/**
* <code>rpc ping(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
/**
* <code>rpc echo(.EchoRequestProto) returns (.EchoResponseProto);</code>
*/
public abstract void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto> done);
/**
* <code>rpc error(.EmptyRequestProto) returns (.EmptyResponseProto);</code>
*/
public abstract void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto> done);
public static final
com.google.protobuf.Descriptors.ServiceDescriptor
getDescriptor() {
@ -157,7 +183,7 @@ public final class TestRpcServiceProtos {
getDescriptorForType() {
return getDescriptor();
}
public final void callMethod(
com.google.protobuf.Descriptors.MethodDescriptor method,
com.google.protobuf.RpcController controller,
@ -189,7 +215,7 @@ public final class TestRpcServiceProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getRequestPrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@ -209,7 +235,7 @@ public final class TestRpcServiceProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
public final com.google.protobuf.Message
getResponsePrototype(
com.google.protobuf.Descriptors.MethodDescriptor method) {
@ -229,23 +255,23 @@ public final class TestRpcServiceProtos {
throw new java.lang.AssertionError("Can't get here.");
}
}
public static Stub newStub(
com.google.protobuf.RpcChannel channel) {
return new Stub(channel);
}
public static final class Stub extends org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto implements Interface {
private Stub(com.google.protobuf.RpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.RpcChannel channel;
public com.google.protobuf.RpcChannel getChannel() {
return channel;
}
public void ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
@ -260,7 +286,7 @@ public final class TestRpcServiceProtos {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.class,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
public void echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request,
@ -275,7 +301,7 @@ public final class TestRpcServiceProtos {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.class,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance()));
}
public void error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request,
@ -291,36 +317,36 @@ public final class TestRpcServiceProtos {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance()));
}
}
public static BlockingInterface newBlockingStub(
com.google.protobuf.BlockingRpcChannel channel) {
return new BlockingStub(channel);
}
public interface BlockingInterface {
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request)
throws com.google.protobuf.ServiceException;
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
throws com.google.protobuf.ServiceException;
}
private static final class BlockingStub implements BlockingInterface {
private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
this.channel = channel;
}
private final com.google.protobuf.BlockingRpcChannel channel;
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto ping(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
@ -331,8 +357,8 @@ public final class TestRpcServiceProtos {
request,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance());
}
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto echo(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoRequestProto request)
@ -343,8 +369,8 @@ public final class TestRpcServiceProtos {
request,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EchoResponseProto.getDefaultInstance());
}
public org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto error(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyRequestProto request)
@ -355,11 +381,13 @@ public final class TestRpcServiceProtos {
request,
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.EmptyResponseProto.getDefaultInstance());
}
}
// @@protoc_insertion_point(class_scope:TestProtobufRpcProto)
}
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
@ -391,6 +419,6 @@ public final class TestRpcServiceProtos {
org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos.getDescriptor(),
}, assigner);
}
// @@protoc_insertion_point(outer_class_scope)
}
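A minimal sketch of exercising the test service above through its blocking stub; how the BlockingRpcChannel is obtained is environment-specific and assumed here, as is the channel tolerating a null controller:

import com.google.protobuf.BlockingRpcChannel;
import com.google.protobuf.ServiceException;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto;

class PingSketch {
  static TestProtos.EmptyResponseProto ping(BlockingRpcChannel channel)
      throws ServiceException {
    TestProtobufRpcProto.BlockingInterface stub =
        TestProtobufRpcProto.newBlockingStub(channel);
    return stub.ping(null, TestProtos.EmptyRequestProto.getDefaultInstance());
  }
}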


@ -365,6 +365,10 @@
</developer>
</developers>
<repositories>
<repository>
<id>Arun Staging 2.1.0-beta RCs</id>
<url>https://repository.apache.org/content/repositories/orgapachehadoop-099/</url>
</repository>
<repository>
<id>cloudbees netty</id>
<url>http://repository-netty.forge.cloudbees.com/snapshot/</url>
@ -881,7 +885,7 @@
<buildDate>${maven.build.timestamp}</buildDate>
<compileSource>1.6</compileSource>
<!-- Dependencies -->
<hadoop-two.version>2.0.5-alpha</hadoop-two.version>
<hadoop-two.version>2.1.0-beta</hadoop-two.version>
<hadoop-one.version>1.2.1</hadoop-one.version>
<commons-cli.version>1.2</commons-cli.version>
<commons-codec.version>1.7</commons-codec.version>
@ -905,7 +909,7 @@
<htrace.version>2.00</htrace.version>
<log4j.version>1.2.17</log4j.version>
<mockito-all.version>1.9.0</mockito-all.version>
<protobuf.version>2.4.1</protobuf.version>
<protobuf.version>2.5.0</protobuf.version>
<stax-api.version>1.0.1</stax-api.version>
<thrift.version>0.9.0</thrift.version>
<zookeeper.version>3.4.5</zookeeper.version>