diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 3db0379dd4f..3b826b0fffa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -106,7 +106,7 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Re
 import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;
 import org.apache.hadoop.hbase.protobuf.generated.MasterAdminProtos.CreateTableRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterMonitorProtos.GetTableDescriptorsResponse;
-import org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.access.TablePermission;
 import org.apache.hadoop.hbase.security.access.UserPermission;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
index 99db83091bf..8e66fef8e3a 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/codec/KeyValueCodec.java
@@ -43,8 +43,8 @@ import org.apache.hadoop.hbase.KeyValueUtil;
  *
  */
 public class KeyValueCodec implements Codec {
-  static class KeyValueEncoder extends BaseEncoder {
-    KeyValueEncoder(final OutputStream out) {
+  public static class KeyValueEncoder extends BaseEncoder {
+    public KeyValueEncoder(final OutputStream out) {
       super(out);
     }
 
@@ -61,8 +61,8 @@ public class KeyValueCodec implements Codec {
     }
   }
 
-  static class KeyValueDecoder extends BaseDecoder {
-    KeyValueDecoder(final InputStream in) {
+  public static class KeyValueDecoder extends BaseDecoder {
+    public KeyValueDecoder(final InputStream in) {
       super(in);
     }
 
diff --git a/hbase-common/src/main/resources/hbase-default.xml b/hbase-common/src/main/resources/hbase-default.xml
index 3c33dc96e5b..23e71317ece 100644
--- a/hbase-common/src/main/resources/hbase-default.xml
+++ b/hbase-common/src/main/resources/hbase-default.xml
@@ -215,12 +215,12 @@
   <property>
     <name>hbase.regionserver.hlog.reader.impl</name>
-    <value>org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogReader</value>
+    <value>org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader</value>
     <description>The HLog file reader implementation.</description>
   </property>
   <property>
     <name>hbase.regionserver.hlog.writer.impl</name>
-    <value>org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogWriter</value>
+    <value>org.apache.hadoop.hbase.regionserver.wal.ProtobufLogWriter</value>
     <description>The HLog file writer implementation.</description>
   </property>
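The KeyValueCodec hunk above widens the nested encoder/decoder classes and their constructors from package-private to public, so callers outside org.apache.hadoop.hbase.codec (for example the WAL-entry serialization touched later in this patch) can construct them directly. The following round-trip sketch is not part of the patch; it assumes the Codec.Encoder/Decoder contract of write()/flush() and advance()/current(), and the class name and sample cell are made up for illustration.

// Illustrative only -- not part of the patch. Assumes the Codec.Encoder /
// Codec.Decoder contract (write/flush on the encoder, advance/current on the
// decoder) that KeyValueCodec's base classes implement.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.codec.KeyValueCodec;
import org.apache.hadoop.hbase.util.Bytes;

public class KeyValueCodecRoundTrip {
  public static void main(String[] args) throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // Now that the nested classes are public, code outside the codec package
    // can instantiate the encoder directly instead of going through a Codec.
    KeyValueCodec.KeyValueEncoder encoder = new KeyValueCodec.KeyValueEncoder(out);
    encoder.write(new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("value")));
    encoder.flush();

    // Decode the framed bytes back into cells.
    KeyValueCodec.KeyValueDecoder decoder =
        new KeyValueCodec.KeyValueDecoder(new ByteArrayInputStream(out.toByteArray()));
    while (decoder.advance()) {
      System.out.println(decoder.current());
    }
  }
}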
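The hbase-default.xml hunk switches the default HLog reader/writer implementations from the SequenceFile-based classes to the new protobuf-based ones. A deployment that needed to keep the previous format (for example during a rollback) could override the same two keys in hbase-site.xml or in code. The sketch below only reuses the property names and class names from the hunk; the rollback scenario and the helper class name are assumptions, not guidance from the patch.

// Hypothetical helper, not part of the patch: pins the pre-patch
// SequenceFile-based HLog reader/writer via the keys changed above.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class LegacyWalFormat {
  static final String HLOG_READER_IMPL = "hbase.regionserver.hlog.reader.impl";
  static final String HLOG_WRITER_IMPL = "hbase.regionserver.hlog.writer.impl";

  public static Configuration pinSequenceFileWal(Configuration base) {
    Configuration conf = new Configuration(base);
    conf.set(HLOG_READER_IMPL,
        "org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogReader");
    conf.set(HLOG_WRITER_IMPL,
        "org.apache.hadoop.hbase.regionserver.wal.SequenceFileLogWriter");
    return conf;
  }

  public static void main(String[] args) {
    Configuration conf = pinSequenceFileWal(HBaseConfiguration.create());
    System.out.println(conf.get(HLOG_READER_IMPL));
  }
}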
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java index a23c4987a8c..89c71e80ec4 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/AdminProtos.java @@ -9591,471 +9591,18 @@ public final class AdminProtos { // @@protoc_insertion_point(class_scope:MergeRegionsResponse) } - public interface UUIDOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required uint64 leastSigBits = 1; - boolean hasLeastSigBits(); - long getLeastSigBits(); - - // required uint64 mostSigBits = 2; - boolean hasMostSigBits(); - long getMostSigBits(); - } - public static final class UUID extends - com.google.protobuf.GeneratedMessage - implements UUIDOrBuilder { - // Use UUID.newBuilder() to construct. - private UUID(Builder builder) { - super(builder); - } - private UUID(boolean noInit) {} - - private static final UUID defaultInstance; - public static UUID getDefaultInstance() { - return defaultInstance; - } - - public UUID getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; - } - - private int bitField0_; - // required uint64 leastSigBits = 1; - public static final int LEASTSIGBITS_FIELD_NUMBER = 1; - private long leastSigBits_; - public boolean hasLeastSigBits() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLeastSigBits() { - return leastSigBits_; - } - - // required uint64 mostSigBits = 2; - public static final int MOSTSIGBITS_FIELD_NUMBER = 2; - private long mostSigBits_; - public boolean hasMostSigBits() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getMostSigBits() { - return mostSigBits_; - } - - private void initFields() { - leastSigBits_ = 0L; - mostSigBits_ = 0L; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasLeastSigBits()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMostSigBits()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeUInt64(1, leastSigBits_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeUInt64(2, mostSigBits_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(1, leastSigBits_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(2, mostSigBits_); - } - size += 
getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) obj; - - boolean result = true; - result = result && (hasLeastSigBits() == other.hasLeastSigBits()); - if (hasLeastSigBits()) { - result = result && (getLeastSigBits() - == other.getLeastSigBits()); - } - result = result && (hasMostSigBits() == other.hasMostSigBits()); - if (hasMostSigBits()) { - result = result && (getMostSigBits() - == other.getMostSigBits()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasLeastSigBits()) { - hash = (37 * hash) + LEASTSIGBITS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLeastSigBits()); - } - if (hasMostSigBits()) { - hash = (37 * hash) + MOSTSIGBITS_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getMostSigBits()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID 
parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_UUID_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - leastSigBits_ = 0L; - bitField0_ = (bitField0_ & ~0x00000001); - mostSigBits_ = 0L; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = buildPartial(); - if 
(!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.leastSigBits_ = leastSigBits_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.mostSigBits_ = mostSigBits_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) return this; - if (other.hasLeastSigBits()) { - setLeastSigBits(other.getLeastSigBits()); - } - if (other.hasMostSigBits()) { - setMostSigBits(other.getMostSigBits()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasLeastSigBits()) { - - return false; - } - if (!hasMostSigBits()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 8: { - bitField0_ |= 0x00000001; - leastSigBits_ = input.readUInt64(); - break; - } - case 16: { - bitField0_ |= 0x00000002; - mostSigBits_ = input.readUInt64(); - break; - } - } - } - } - - private int bitField0_; - - // required uint64 leastSigBits = 1; - private long leastSigBits_ ; - public boolean hasLeastSigBits() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public long getLeastSigBits() { - return leastSigBits_; - } - public Builder setLeastSigBits(long value) { - bitField0_ |= 0x00000001; - leastSigBits_ = value; - onChanged(); - return this; - } - public Builder clearLeastSigBits() { - bitField0_ = (bitField0_ & ~0x00000001); - leastSigBits_ = 0L; - onChanged(); - return this; - } - - // required uint64 mostSigBits = 2; - private long mostSigBits_ ; - public boolean hasMostSigBits() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public long getMostSigBits() { - return mostSigBits_; - } - public Builder setMostSigBits(long value) { - bitField0_ |= 0x00000002; - mostSigBits_ = value; - onChanged(); - return this; - } - public Builder clearMostSigBits() { - bitField0_ = (bitField0_ & ~0x00000002); - mostSigBits_ = 0L; - onChanged(); - return this; - } - - // 
@@protoc_insertion_point(builder_scope:UUID) - } - - static { - defaultInstance = new UUID(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:UUID) - } - public interface WALEntryOrBuilder extends com.google.protobuf.MessageOrBuilder { - // required .WALEntry.WALKey key = 1; + // required .WALKey key = 1; boolean hasKey(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getKey(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder(); - // required .WALEntry.WALEdit edit = 2; - boolean hasEdit(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder(); + // repeated bytes keyValueBytes = 2; + java.util.List getKeyValueBytesList(); + int getKeyValueBytesCount(); + com.google.protobuf.ByteString getKeyValueBytes(int index); } public static final class WALEntry extends com.google.protobuf.GeneratedMessage @@ -10085,2045 +9632,37 @@ public final class AdminProtos { return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_fieldAccessorTable; } - public interface WALKeyOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes encodedRegionName = 1; - boolean hasEncodedRegionName(); - com.google.protobuf.ByteString getEncodedRegionName(); - - // required bytes tableName = 2; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - - // required uint64 logSequenceNumber = 3; - boolean hasLogSequenceNumber(); - long getLogSequenceNumber(); - - // required uint64 writeTime = 4; - boolean hasWriteTime(); - long getWriteTime(); - - // optional .UUID clusterId = 5; - boolean hasClusterId(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder(); - } - public static final class WALKey extends - com.google.protobuf.GeneratedMessage - implements WALKeyOrBuilder { - // Use WALKey.newBuilder() to construct. 
- private WALKey(Builder builder) { - super(builder); - } - private WALKey(boolean noInit) {} - - private static final WALKey defaultInstance; - public static WALKey getDefaultInstance() { - return defaultInstance; - } - - public WALKey getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; - } - - private int bitField0_; - // required bytes encodedRegionName = 1; - public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString encodedRegionName_; - public boolean hasEncodedRegionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getEncodedRegionName() { - return encodedRegionName_; - } - - // required bytes tableName = 2; - public static final int TABLENAME_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // required uint64 logSequenceNumber = 3; - public static final int LOGSEQUENCENUMBER_FIELD_NUMBER = 3; - private long logSequenceNumber_; - public boolean hasLogSequenceNumber() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getLogSequenceNumber() { - return logSequenceNumber_; - } - - // required uint64 writeTime = 4; - public static final int WRITETIME_FIELD_NUMBER = 4; - private long writeTime_; - public boolean hasWriteTime() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public long getWriteTime() { - return writeTime_; - } - - // optional .UUID clusterId = 5; - public static final int CLUSTERID_FIELD_NUMBER = 5; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_; - public boolean hasClusterId() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { - return clusterId_; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { - return clusterId_; - } - - private void initFields() { - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - tableName_ = com.google.protobuf.ByteString.EMPTY; - logSequenceNumber_ = 0L; - writeTime_ = 0L; - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasEncodedRegionName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasLogSequenceNumber()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasWriteTime()) { - memoizedIsInitialized = 0; - return false; - } - if (hasClusterId()) { - if (!getClusterId().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws 
java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, encodedRegionName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, tableName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeUInt64(3, logSequenceNumber_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeUInt64(4, writeTime_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(5, clusterId_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, encodedRegionName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, tableName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(3, logSequenceNumber_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeUInt64Size(4, writeTime_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(5, clusterId_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) obj; - - boolean result = true; - result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); - if (hasEncodedRegionName()) { - result = result && getEncodedRegionName() - .equals(other.getEncodedRegionName()); - } - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); - if (hasLogSequenceNumber()) { - result = result && (getLogSequenceNumber() - == other.getLogSequenceNumber()); - } - result = result && (hasWriteTime() == other.hasWriteTime()); - if (hasWriteTime()) { - result = result && (getWriteTime() - == other.getWriteTime()); - } - result = result && (hasClusterId() == other.hasClusterId()); - if (hasClusterId()) { - result = result && getClusterId() - .equals(other.getClusterId()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasEncodedRegionName()) { - hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER; - hash = (53 * hash) + getEncodedRegionName().hashCode(); - } - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - if 
(hasLogSequenceNumber()) { - hash = (37 * hash) + LOGSEQUENCENUMBER_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getLogSequenceNumber()); - } - if (hasWriteTime()) { - hash = (37 * hash) + WRITETIME_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getWriteTime()); - } - if (hasClusterId()) { - hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; - hash = (53 * hash) + getClusterId().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALKey_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getClusterIdFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - logSequenceNumber_ = 0L; - bitField0_ = (bitField0_ & ~0x00000004); - writeTime_ = 0L; - bitField0_ = (bitField0_ & ~0x00000008); - if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); - } else { - clusterIdBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey(this); - 
int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.encodedRegionName_ = encodedRegionName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.tableName_ = tableName_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.logSequenceNumber_ = logSequenceNumber_; - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.writeTime_ = writeTime_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - if (clusterIdBuilder_ == null) { - result.clusterId_ = clusterId_; - } else { - result.clusterId_ = clusterIdBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) return this; - if (other.hasEncodedRegionName()) { - setEncodedRegionName(other.getEncodedRegionName()); - } - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - if (other.hasLogSequenceNumber()) { - setLogSequenceNumber(other.getLogSequenceNumber()); - } - if (other.hasWriteTime()) { - setWriteTime(other.getWriteTime()); - } - if (other.hasClusterId()) { - mergeClusterId(other.getClusterId()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasEncodedRegionName()) { - - return false; - } - if (!hasTableName()) { - - return false; - } - if (!hasLogSequenceNumber()) { - - return false; - } - if (!hasWriteTime()) { - - return false; - } - if (hasClusterId()) { - if (!getClusterId().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - encodedRegionName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - tableName_ = input.readBytes(); - break; - } - case 24: { - bitField0_ |= 0x00000004; - logSequenceNumber_ = input.readUInt64(); - break; - } - case 32: { - bitField0_ |= 0x00000008; - writeTime_ = input.readUInt64(); - break; - } - case 42: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(); - if (hasClusterId()) { - subBuilder.mergeFrom(getClusterId()); - } - 
input.readMessage(subBuilder, extensionRegistry); - setClusterId(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // required bytes encodedRegionName = 1; - private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasEncodedRegionName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getEncodedRegionName() { - return encodedRegionName_; - } - public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - encodedRegionName_ = value; - onChanged(); - return this; - } - public Builder clearEncodedRegionName() { - bitField0_ = (bitField0_ & ~0x00000001); - encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); - onChanged(); - return this; - } - - // required bytes tableName = 2; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000002); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // required uint64 logSequenceNumber = 3; - private long logSequenceNumber_ ; - public boolean hasLogSequenceNumber() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public long getLogSequenceNumber() { - return logSequenceNumber_; - } - public Builder setLogSequenceNumber(long value) { - bitField0_ |= 0x00000004; - logSequenceNumber_ = value; - onChanged(); - return this; - } - public Builder clearLogSequenceNumber() { - bitField0_ = (bitField0_ & ~0x00000004); - logSequenceNumber_ = 0L; - onChanged(); - return this; - } - - // required uint64 writeTime = 4; - private long writeTime_ ; - public boolean hasWriteTime() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public long getWriteTime() { - return writeTime_; - } - public Builder setWriteTime(long value) { - bitField0_ |= 0x00000008; - writeTime_ = value; - onChanged(); - return this; - } - public Builder clearWriteTime() { - bitField0_ = (bitField0_ & ~0x00000008); - writeTime_ = 0L; - onChanged(); - return this; - } - - // optional .UUID clusterId = 5; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> clusterIdBuilder_; - public boolean hasClusterId() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID getClusterId() { - if (clusterIdBuilder_ == null) { - return clusterId_; - } else { - return clusterIdBuilder_.getMessage(); - } - } - public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { - if (clusterIdBuilder_ == null) { - if (value == null) { - throw new 
NullPointerException(); - } - clusterId_ = value; - onChanged(); - } else { - clusterIdBuilder_.setMessage(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder setClusterId( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder builderForValue) { - if (clusterIdBuilder_ == null) { - clusterId_ = builderForValue.build(); - onChanged(); - } else { - clusterIdBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID value) { - if (clusterIdBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010) && - clusterId_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance()) { - clusterId_ = - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); - } else { - clusterId_ = value; - } - onChanged(); - } else { - clusterIdBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000010; - return this; - } - public Builder clearClusterId() { - if (clusterIdBuilder_ == null) { - clusterId_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.getDefaultInstance(); - onChanged(); - } else { - clusterIdBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder getClusterIdBuilder() { - bitField0_ |= 0x00000010; - onChanged(); - return getClusterIdFieldBuilder().getBuilder(); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder getClusterIdOrBuilder() { - if (clusterIdBuilder_ != null) { - return clusterIdBuilder_.getMessageOrBuilder(); - } else { - return clusterId_; - } - } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder> - getClusterIdFieldBuilder() { - if (clusterIdBuilder_ == null) { - clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUIDOrBuilder>( - clusterId_, - getParentForChildren(), - isClean()); - clusterId_ = null; - } - return clusterIdBuilder_; - } - - // @@protoc_insertion_point(builder_scope:WALEntry.WALKey) - } - - static { - defaultInstance = new WALKey(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:WALEntry.WALKey) - } - - public interface WALEditOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // repeated bytes keyValueBytes = 1; - java.util.List getKeyValueBytesList(); - int getKeyValueBytesCount(); - com.google.protobuf.ByteString getKeyValueBytes(int index); - - // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; - java.util.List - getFamilyScopeList(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index); - int getFamilyScopeCount(); - java.util.List - getFamilyScopeOrBuilderList(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( - int index); - } - public static final class WALEdit extends - com.google.protobuf.GeneratedMessage - implements WALEditOrBuilder { - // Use WALEdit.newBuilder() to construct. 
- private WALEdit(Builder builder) { - super(builder); - } - private WALEdit(boolean noInit) {} - - private static final WALEdit defaultInstance; - public static WALEdit getDefaultInstance() { - return defaultInstance; - } - - public WALEdit getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; - } - - public enum ScopeType - implements com.google.protobuf.ProtocolMessageEnum { - REPLICATION_SCOPE_LOCAL(0, 0), - REPLICATION_SCOPE_GLOBAL(1, 1), - ; - - public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0; - public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1; - - - public final int getNumber() { return value; } - - public static ScopeType valueOf(int value) { - switch (value) { - case 0: return REPLICATION_SCOPE_LOCAL; - case 1: return REPLICATION_SCOPE_GLOBAL; - default: return null; - } - } - - public static com.google.protobuf.Internal.EnumLiteMap - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap() { - public ScopeType findValueByNumber(int number) { - return ScopeType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor().getEnumTypes().get(0); - } - - private static final ScopeType[] VALUES = { - REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL, - }; - - public static ScopeType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; - } - - private final int index; - private final int value; - - private ScopeType(int index, int value) { - this.index = index; - this.value = value; - } - - // @@protoc_insertion_point(enum_scope:WALEntry.WALEdit.ScopeType) - } - - public interface FamilyScopeOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes family = 1; - boolean hasFamily(); - com.google.protobuf.ByteString getFamily(); - - // required .WALEntry.WALEdit.ScopeType scopeType = 2; - boolean hasScopeType(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType(); - } - public static final class FamilyScope extends - com.google.protobuf.GeneratedMessage - implements FamilyScopeOrBuilder { - // Use FamilyScope.newBuilder() to construct. 
- private FamilyScope(Builder builder) { - super(builder); - } - private FamilyScope(boolean noInit) {} - - private static final FamilyScope defaultInstance; - public static FamilyScope getDefaultInstance() { - return defaultInstance; - } - - public FamilyScope getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; - } - - private int bitField0_; - // required bytes family = 1; - public static final int FAMILY_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString family_; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - - // required .WALEntry.WALEdit.ScopeType scopeType = 2; - public static final int SCOPETYPE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_; - public boolean hasScopeType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { - return scopeType_; - } - - private void initFields() { - family_ = com.google.protobuf.ByteString.EMPTY; - scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasFamily()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasScopeType()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, scopeType_.getNumber()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, family_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, scopeType_.getNumber()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)) { - return super.equals(obj); - } - 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) obj; - - boolean result = true; - result = result && (hasFamily() == other.hasFamily()); - if (hasFamily()) { - result = result && getFamily() - .equals(other.getFamily()); - } - result = result && (hasScopeType() == other.hasScopeType()); - if (hasScopeType()) { - result = result && - (getScopeType() == other.getScopeType()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasFamily()) { - hash = (37 * hash) + FAMILY_FIELD_NUMBER; - hash = (53 * hash) + getFamily().hashCode(); - } - if (hasScopeType()) { - hash = (37 * hash) + SCOPETYPE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getScopeType()); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - family_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildParsed() - throws 
com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.family_ = family_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.scopeType_ = scopeType_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()) return this; - if (other.hasFamily()) { - setFamily(other.getFamily()); - } - if (other.hasScopeType()) { - setScopeType(other.getScopeType()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasFamily()) { - - return false; - } - if (!hasScopeType()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - family_ = input.readBytes(); - break; - } - case 16: { - int rawValue = input.readEnum(); - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(2, rawValue); - } else { - bitField0_ |= 0x00000002; - scopeType_ = value; - } - break; - } - } - } - } - - private int bitField0_; - - // required bytes family = 1; - private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamily() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getFamily() { - return family_; - } - public Builder setFamily(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; 
- family_ = value; - onChanged(); - return this; - } - public Builder clearFamily() { - bitField0_ = (bitField0_ & ~0x00000001); - family_ = getDefaultInstance().getFamily(); - onChanged(); - return this; - } - - // required .WALEntry.WALEdit.ScopeType scopeType = 2; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; - public boolean hasScopeType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType getScopeType() { - return scopeType_; - } - public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - scopeType_ = value; - onChanged(); - return this; - } - public Builder clearScopeType() { - bitField0_ = (bitField0_ & ~0x00000002); - scopeType_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.ScopeType.REPLICATION_SCOPE_LOCAL; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit.FamilyScope) - } - - static { - defaultInstance = new FamilyScope(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:WALEntry.WALEdit.FamilyScope) - } - - // repeated bytes keyValueBytes = 1; - public static final int KEYVALUEBYTES_FIELD_NUMBER = 1; - private java.util.List keyValueBytes_; - public java.util.List - getKeyValueBytesList() { - return keyValueBytes_; - } - public int getKeyValueBytesCount() { - return keyValueBytes_.size(); - } - public com.google.protobuf.ByteString getKeyValueBytes(int index) { - return keyValueBytes_.get(index); - } - - // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; - public static final int FAMILYSCOPE_FIELD_NUMBER = 2; - private java.util.List familyScope_; - public java.util.List getFamilyScopeList() { - return familyScope_; - } - public java.util.List - getFamilyScopeOrBuilderList() { - return familyScope_; - } - public int getFamilyScopeCount() { - return familyScope_.size(); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { - return familyScope_.get(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( - int index) { - return familyScope_.get(index); - } - - private void initFields() { - keyValueBytes_ = java.util.Collections.emptyList();; - familyScope_ = java.util.Collections.emptyList(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - for (int i = 0; i < getFamilyScopeCount(); i++) { - if (!getFamilyScope(i).isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - for (int i = 0; i < keyValueBytes_.size(); i++) { - output.writeBytes(1, keyValueBytes_.get(i)); - } - for (int i = 0; i < familyScope_.size(); i++) { - output.writeMessage(2, familyScope_.get(i)); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int 
getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - { - int dataSize = 0; - for (int i = 0; i < keyValueBytes_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(keyValueBytes_.get(i)); - } - size += dataSize; - size += 1 * getKeyValueBytesList().size(); - } - for (int i = 0; i < familyScope_.size(); i++) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, familyScope_.get(i)); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) obj; - - boolean result = true; - result = result && getKeyValueBytesList() - .equals(other.getKeyValueBytesList()); - result = result && getFamilyScopeList() - .equals(other.getFamilyScopeList()); - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (getKeyValueBytesCount() > 0) { - hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER; - hash = (53 * hash) + getKeyValueBytesList().hashCode(); - } - if (getFamilyScopeCount() > 0) { - hash = (37 * hash) + FAMILYSCOPE_FIELD_NUMBER; - hash = (53 * hash) + getFamilyScopeList().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WALEntry_WALEdit_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getFamilyScopeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - keyValueBytes_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - if (familyScopeBuilder_ == null) { - familyScope_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - } else { - familyScopeBuilder_.clear(); - } - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit build() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit(this); - int from_bitField0_ = bitField0_; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_); - bitField0_ = (bitField0_ & ~0x00000001); - } - result.keyValueBytes_ = keyValueBytes_; - if (familyScopeBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002)) { - familyScope_ = java.util.Collections.unmodifiableList(familyScope_); - bitField0_ = (bitField0_ & ~0x00000002); - } - result.familyScope_ = familyScope_; - } else { - result.familyScope_ = familyScopeBuilder_.build(); - } - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) return this; - if (!other.keyValueBytes_.isEmpty()) { - if (keyValueBytes_.isEmpty()) { - keyValueBytes_ = other.keyValueBytes_; - bitField0_ = (bitField0_ & ~0x00000001); - } else { - ensureKeyValueBytesIsMutable(); - keyValueBytes_.addAll(other.keyValueBytes_); - } - onChanged(); - } - if (familyScopeBuilder_ == null) { - if (!other.familyScope_.isEmpty()) { - if (familyScope_.isEmpty()) { - familyScope_ = other.familyScope_; - bitField0_ = (bitField0_ & ~0x00000002); - } else { - ensureFamilyScopeIsMutable(); - familyScope_.addAll(other.familyScope_); - } - onChanged(); - } - } else { - if (!other.familyScope_.isEmpty()) { - if (familyScopeBuilder_.isEmpty()) { - familyScopeBuilder_.dispose(); - familyScopeBuilder_ = null; - familyScope_ = other.familyScope_; - bitField0_ = (bitField0_ & ~0x00000002); - familyScopeBuilder_ = - com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
- getFamilyScopeFieldBuilder() : null; - } else { - familyScopeBuilder_.addAllMessages(other.familyScope_); - } - } - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - for (int i = 0; i < getFamilyScopeCount(); i++) { - if (!getFamilyScope(i).isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - ensureKeyValueBytesIsMutable(); - keyValueBytes_.add(input.readBytes()); - break; - } - case 18: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.newBuilder(); - input.readMessage(subBuilder, extensionRegistry); - addFamilyScope(subBuilder.buildPartial()); - break; - } - } - } - } - - private int bitField0_; - - // repeated bytes keyValueBytes = 1; - private java.util.List keyValueBytes_ = java.util.Collections.emptyList();; - private void ensureKeyValueBytesIsMutable() { - if (!((bitField0_ & 0x00000001) == 0x00000001)) { - keyValueBytes_ = new java.util.ArrayList(keyValueBytes_); - bitField0_ |= 0x00000001; - } - } - public java.util.List - getKeyValueBytesList() { - return java.util.Collections.unmodifiableList(keyValueBytes_); - } - public int getKeyValueBytesCount() { - return keyValueBytes_.size(); - } - public com.google.protobuf.ByteString getKeyValueBytes(int index) { - return keyValueBytes_.get(index); - } - public Builder setKeyValueBytes( - int index, com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueBytesIsMutable(); - keyValueBytes_.set(index, value); - onChanged(); - return this; - } - public Builder addKeyValueBytes(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - ensureKeyValueBytesIsMutable(); - keyValueBytes_.add(value); - onChanged(); - return this; - } - public Builder addAllKeyValueBytes( - java.lang.Iterable values) { - ensureKeyValueBytesIsMutable(); - super.addAll(values, keyValueBytes_); - onChanged(); - return this; - } - public Builder clearKeyValueBytes() { - keyValueBytes_ = java.util.Collections.emptyList();; - bitField0_ = (bitField0_ & ~0x00000001); - onChanged(); - return this; - } - - // repeated .WALEntry.WALEdit.FamilyScope familyScope = 2; - private java.util.List familyScope_ = - java.util.Collections.emptyList(); - private void ensureFamilyScopeIsMutable() { - if (!((bitField0_ & 0x00000002) == 0x00000002)) { - familyScope_ = new java.util.ArrayList(familyScope_); - bitField0_ |= 0x00000002; - } - } - - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> familyScopeBuilder_; - - public java.util.List getFamilyScopeList() { - if (familyScopeBuilder_ == null) { - return java.util.Collections.unmodifiableList(familyScope_); - } else { - return familyScopeBuilder_.getMessageList(); - } - } - public int getFamilyScopeCount() { - if (familyScopeBuilder_ == null) { - return familyScope_.size(); - } else { - return familyScopeBuilder_.getCount(); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope getFamilyScope(int index) { - if (familyScopeBuilder_ == null) { - return familyScope_.get(index); - } else { - return familyScopeBuilder_.getMessage(index); - } - } - public Builder setFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { - if (familyScopeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyScopeIsMutable(); - familyScope_.set(index, value); - onChanged(); - } else { - familyScopeBuilder_.setMessage(index, value); - } - return this; - } - public Builder setFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.set(index, builderForValue.build()); - onChanged(); - } else { - familyScopeBuilder_.setMessage(index, builderForValue.build()); - } - return this; - } - public Builder addFamilyScope(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { - if (familyScopeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyScopeIsMutable(); - familyScope_.add(value); - onChanged(); - } else { - familyScopeBuilder_.addMessage(value); - } - return this; - } - public Builder addFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope value) { - if (familyScopeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ensureFamilyScopeIsMutable(); - familyScope_.add(index, value); - onChanged(); - } else { - familyScopeBuilder_.addMessage(index, value); - } - return this; - } - public Builder addFamilyScope( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.add(builderForValue.build()); - onChanged(); - } else { - familyScopeBuilder_.addMessage(builderForValue.build()); - } - return this; - } - public Builder addFamilyScope( - int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder builderForValue) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.add(index, builderForValue.build()); - onChanged(); - } else { - familyScopeBuilder_.addMessage(index, builderForValue.build()); - } - return this; - } - public Builder addAllFamilyScope( - java.lang.Iterable values) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - super.addAll(values, familyScope_); - onChanged(); - } else { - familyScopeBuilder_.addAllMessages(values); - } - return this; - } - public Builder clearFamilyScope() { - if (familyScopeBuilder_ == null) { - familyScope_ = java.util.Collections.emptyList(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - } else { - 
familyScopeBuilder_.clear(); - } - return this; - } - public Builder removeFamilyScope(int index) { - if (familyScopeBuilder_ == null) { - ensureFamilyScopeIsMutable(); - familyScope_.remove(index); - onChanged(); - } else { - familyScopeBuilder_.remove(index); - } - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder getFamilyScopeBuilder( - int index) { - return getFamilyScopeFieldBuilder().getBuilder(index); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder getFamilyScopeOrBuilder( - int index) { - if (familyScopeBuilder_ == null) { - return familyScope_.get(index); } else { - return familyScopeBuilder_.getMessageOrBuilder(index); - } - } - public java.util.List - getFamilyScopeOrBuilderList() { - if (familyScopeBuilder_ != null) { - return familyScopeBuilder_.getMessageOrBuilderList(); - } else { - return java.util.Collections.unmodifiableList(familyScope_); - } - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder() { - return getFamilyScopeFieldBuilder().addBuilder( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder addFamilyScopeBuilder( - int index) { - return getFamilyScopeFieldBuilder().addBuilder( - index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.getDefaultInstance()); - } - public java.util.List - getFamilyScopeBuilderList() { - return getFamilyScopeFieldBuilder().getBuilderList(); - } - private com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder> - getFamilyScopeFieldBuilder() { - if (familyScopeBuilder_ == null) { - familyScopeBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScopeOrBuilder>( - familyScope_, - ((bitField0_ & 0x00000002) == 0x00000002), - getParentForChildren(), - isClean()); - familyScope_ = null; - } - return familyScopeBuilder_; - } - - // @@protoc_insertion_point(builder_scope:WALEntry.WALEdit) - } - - static { - defaultInstance = new WALEdit(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:WALEntry.WALEdit) - } - private int bitField0_; - // required .WALEntry.WALKey key = 1; + // required .WALKey key = 1; public static final int KEY_FIELD_NUMBER = 1; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey key_; + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey key_; public boolean hasKey() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey() { + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getKey() { return key_; } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder() { + public 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder getKeyOrBuilder() { return key_; } - // required .WALEntry.WALEdit edit = 2; - public static final int EDIT_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_; - public boolean hasEdit() { - return ((bitField0_ & 0x00000002) == 0x00000002); + // repeated bytes keyValueBytes = 2; + public static final int KEYVALUEBYTES_FIELD_NUMBER = 2; + private java.util.List keyValueBytes_; + public java.util.List + getKeyValueBytesList() { + return keyValueBytes_; } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { - return edit_; + public int getKeyValueBytesCount() { + return keyValueBytes_.size(); } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { - return edit_; + public com.google.protobuf.ByteString getKeyValueBytes(int index) { + return keyValueBytes_.get(index); } private void initFields() { - key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); - edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); + key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); + keyValueBytes_ = java.util.Collections.emptyList();; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { @@ -12134,18 +9673,10 @@ public final class AdminProtos { memoizedIsInitialized = 0; return false; } - if (!hasEdit()) { - memoizedIsInitialized = 0; - return false; - } if (!getKey().isInitialized()) { memoizedIsInitialized = 0; return false; } - if (!getEdit().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } memoizedIsInitialized = 1; return true; } @@ -12156,8 +9687,8 @@ public final class AdminProtos { if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, key_); } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, edit_); + for (int i = 0; i < keyValueBytes_.size(); i++) { + output.writeBytes(2, keyValueBytes_.get(i)); } getUnknownFields().writeTo(output); } @@ -12172,9 +9703,14 @@ public final class AdminProtos { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, key_); } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, edit_); + { + int dataSize = 0; + for (int i = 0; i < keyValueBytes_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(keyValueBytes_.get(i)); + } + size += dataSize; + size += 1 * getKeyValueBytesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -12204,11 +9740,8 @@ public final class AdminProtos { result = result && getKey() .equals(other.getKey()); } - result = result && (hasEdit() == other.hasEdit()); - if (hasEdit()) { - result = result && getEdit() - .equals(other.getEdit()); - } + result = result && getKeyValueBytesList() + .equals(other.getKeyValueBytesList()); result = result && getUnknownFields().equals(other.getUnknownFields()); return result; @@ -12222,9 +9755,9 @@ public final class AdminProtos { hash = (37 * hash) + KEY_FIELD_NUMBER; hash = (53 * hash) + getKey().hashCode(); } - if (hasEdit()) { - hash = (37 * hash) + EDIT_FIELD_NUMBER; - hash = (53 * hash) + getEdit().hashCode(); + if (getKeyValueBytesCount() > 0) { + hash = (37 * hash) + KEYVALUEBYTES_FIELD_NUMBER; + hash = (53 * 
hash) + getKeyValueBytesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; @@ -12335,7 +9868,6 @@ public final class AdminProtos { private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { getKeyFieldBuilder(); - getEditFieldBuilder(); } } private static Builder create() { @@ -12345,16 +9877,12 @@ public final class AdminProtos { public Builder clear() { super.clear(); if (keyBuilder_ == null) { - key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); } else { keyBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); - if (editBuilder_ == null) { - edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); - } else { - editBuilder_.clear(); - } + keyValueBytes_ = java.util.Collections.emptyList();; bitField0_ = (bitField0_ & ~0x00000002); return this; } @@ -12402,14 +9930,11 @@ public final class AdminProtos { } else { result.key_ = keyBuilder_.build(); } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (editBuilder_ == null) { - result.edit_ = edit_; - } else { - result.edit_ = editBuilder_.build(); + if (((bitField0_ & 0x00000002) == 0x00000002)) { + keyValueBytes_ = java.util.Collections.unmodifiableList(keyValueBytes_); + bitField0_ = (bitField0_ & ~0x00000002); } + result.keyValueBytes_ = keyValueBytes_; result.bitField0_ = to_bitField0_; onBuilt(); return result; @@ -12429,8 +9954,15 @@ public final class AdminProtos { if (other.hasKey()) { mergeKey(other.getKey()); } - if (other.hasEdit()) { - mergeEdit(other.getEdit()); + if (!other.keyValueBytes_.isEmpty()) { + if (keyValueBytes_.isEmpty()) { + keyValueBytes_ = other.keyValueBytes_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureKeyValueBytesIsMutable(); + keyValueBytes_.addAll(other.keyValueBytes_); + } + onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); return this; @@ -12441,18 +9973,10 @@ public final class AdminProtos { return false; } - if (!hasEdit()) { - - return false; - } if (!getKey().isInitialized()) { return false; } - if (!getEdit().isInitialized()) { - - return false; - } return true; } @@ -12480,7 +10004,7 @@ public final class AdminProtos { break; } case 10: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder(); if (hasKey()) { subBuilder.mergeFrom(getKey()); } @@ -12489,12 +10013,8 @@ public final class AdminProtos { break; } case 18: { - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(); - if (hasEdit()) { - subBuilder.mergeFrom(getEdit()); - } - input.readMessage(subBuilder, extensionRegistry); - setEdit(subBuilder.buildPartial()); + ensureKeyValueBytesIsMutable(); + keyValueBytes_.add(input.readBytes()); break; } } @@ -12503,21 +10023,21 @@ public final class AdminProtos { private int bitField0_; - // required .WALEntry.WALKey key = 1; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey key_ = 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + // required .WALKey key = 1; + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> keyBuilder_; + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder> keyBuilder_; public boolean hasKey() { return ((bitField0_ & 0x00000001) == 0x00000001); } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey getKey() { + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getKey() { if (keyBuilder_ == null) { return key_; } else { return keyBuilder_.getMessage(); } } - public Builder setKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { + public Builder setKey(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey value) { if (keyBuilder_ == null) { if (value == null) { throw new NullPointerException(); @@ -12531,7 +10051,7 @@ public final class AdminProtos { return this; } public Builder setKey( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder builderForValue) { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder builderForValue) { if (keyBuilder_ == null) { key_ = builderForValue.build(); onChanged(); @@ -12541,12 +10061,12 @@ public final class AdminProtos { bitField0_ |= 0x00000001; return this; } - public Builder mergeKey(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey value) { + public Builder mergeKey(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey value) { if (keyBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && - key_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance()) { + key_ != org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) { key_ = - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.newBuilder(key_).mergeFrom(value).buildPartial(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder(key_).mergeFrom(value).buildPartial(); } else { key_ = value; } @@ -12559,7 +10079,7 @@ public final class AdminProtos { } public Builder clearKey() { if (keyBuilder_ == null) { - key_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.getDefaultInstance(); + key_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); onChanged(); } else { keyBuilder_.clear(); @@ -12567,12 +10087,12 @@ public final class AdminProtos { bitField0_ = (bitField0_ & ~0x00000001); return this; } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder getKeyBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder getKeyBuilder() { bitField0_ |= 0x00000001; onChanged(); return getKeyFieldBuilder().getBuilder(); } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder getKeyOrBuilder() { + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder 
getKeyOrBuilder() { if (keyBuilder_ != null) { return keyBuilder_.getMessageOrBuilder(); } else { @@ -12580,11 +10100,11 @@ public final class AdminProtos { } } private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder> + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder> getKeyFieldBuilder() { if (keyBuilder_ == null) { keyBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKeyOrBuilder>( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder>( key_, getParentForChildren(), isClean()); @@ -12593,94 +10113,55 @@ public final class AdminProtos { return keyBuilder_; } - // required .WALEntry.WALEdit edit = 2; - private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> editBuilder_; - public boolean hasEdit() { - return ((bitField0_ & 0x00000002) == 0x00000002); + // repeated bytes keyValueBytes = 2; + private java.util.List keyValueBytes_ = java.util.Collections.emptyList();; + private void ensureKeyValueBytesIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + keyValueBytes_ = new java.util.ArrayList(keyValueBytes_); + bitField0_ |= 0x00000002; + } } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit getEdit() { - if (editBuilder_ == null) { - return edit_; - } else { - return editBuilder_.getMessage(); - } + public java.util.List + getKeyValueBytesList() { + return java.util.Collections.unmodifiableList(keyValueBytes_); } - public Builder setEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) { - if (editBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - edit_ = value; - onChanged(); - } else { - editBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; + public int getKeyValueBytesCount() { + return keyValueBytes_.size(); } - public Builder setEdit( - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder builderForValue) { - if (editBuilder_ == null) { - edit_ = builderForValue.build(); - onChanged(); - } else { - editBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; + public com.google.protobuf.ByteString getKeyValueBytes(int index) { + return keyValueBytes_.get(index); } - public Builder mergeEdit(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit value) { - if (editBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && 
- edit_ != org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance()) { - edit_ = - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.newBuilder(edit_).mergeFrom(value).buildPartial(); - } else { - edit_ = value; - } - onChanged(); - } else { - editBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - public Builder clearEdit() { - if (editBuilder_ == null) { - edit_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.getDefaultInstance(); - onChanged(); - } else { - editBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder getEditBuilder() { - bitField0_ |= 0x00000002; + public Builder setKeyValueBytes( + int index, com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueBytesIsMutable(); + keyValueBytes_.set(index, value); onChanged(); - return getEditFieldBuilder().getBuilder(); + return this; } - public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder getEditOrBuilder() { - if (editBuilder_ != null) { - return editBuilder_.getMessageOrBuilder(); - } else { - return edit_; - } + public Builder addKeyValueBytes(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + ensureKeyValueBytesIsMutable(); + keyValueBytes_.add(value); + onChanged(); + return this; } - private com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder> - getEditFieldBuilder() { - if (editBuilder_ == null) { - editBuilder_ = new com.google.protobuf.SingleFieldBuilder< - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEditOrBuilder>( - edit_, - getParentForChildren(), - isClean()); - edit_ = null; - } - return editBuilder_; + public Builder addAllKeyValueBytes( + java.lang.Iterable values) { + ensureKeyValueBytesIsMutable(); + super.addAll(values, keyValueBytes_); + onChanged(); + return this; + } + public Builder clearKeyValueBytes() { + keyValueBytes_ = java.util.Collections.emptyList();; + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; } // @@protoc_insertion_point(builder_scope:WALEntry) @@ -17424,31 +14905,11 @@ public final class AdminProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_MergeRegionsResponse_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_UUID_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_UUID_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_WALEntry_descriptor; private static com.google.protobuf.GeneratedMessage.FieldAccessorTable internal_static_WALEntry_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_WALKey_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WALEntry_WALKey_fieldAccessorTable; - private static 
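A minimal sketch (not part of the patch) of how a caller might populate the reshaped WALEntry, which now pairs a top-level .WALKey with raw serialized KeyValue bytes instead of the removed nested WALEdit. The setKey/addKeyValueBytes builder methods are introduced by this patch; the WALKey field setters below are assumptions inferred from the fields of the removed nested WALEntry.WALKey, since WAL.proto itself is not shown in this diff.

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;

    public class WALEntrySketch {
      // Builds a WALEntry under the assumption that WAL.proto's WALKey keeps the
      // encodedRegionName/tableName/logSequenceNumber/writeTime fields of the removed
      // nested message; those setter names are inferred, not taken from this patch.
      static WALEntry buildEntry(byte[] encodedRegionName, byte[] tableName,
          long logSeqNum, long writeTime, ByteString serializedKeyValue) {
        WALKey key = WALKey.newBuilder()
            .setEncodedRegionName(ByteString.copyFrom(encodedRegionName)) // assumed setter
            .setTableName(ByteString.copyFrom(tableName))                 // assumed setter
            .setLogSequenceNumber(logSeqNum)                              // assumed setter
            .setWriteTime(writeTime)                                      // assumed setter
            .build();
        return WALEntry.newBuilder()
            .setKey(key)                           // setter added by this patch
            .addKeyValueBytes(serializedKeyValue)  // repeated bytes field added by this patch
            .build();
      }
    }

On the read side, getKey() and getKeyValueBytesList() (both visible in the hunks above) replace the old getEdit() accessor; judging by the field name, each ByteString element presumably carries one serialized KeyValue.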
com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_WALEdit_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WALEntry_WALEdit_fieldAccessorTable; - private static com.google.protobuf.Descriptors.Descriptor - internal_static_WALEntry_WALEdit_FamilyScope_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable; private static com.google.protobuf.Descriptors.Descriptor internal_static_ReplicateWALEntryRequest_descriptor; private static @@ -17503,88 +14964,77 @@ public final class AdminProtos { descriptor; static { java.lang.String[] descriptorData = { - "\n\013Admin.proto\032\013hbase.proto\"Q\n\024GetRegionI" + - "nfoRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpeci" + - "fier\022\027\n\017compactionState\030\002 \001(\010\"\301\001\n\025GetReg" + - "ionInfoResponse\022\037\n\nregionInfo\030\001 \002(\0132\013.Re" + - "gionInfo\022?\n\017compactionState\030\002 \001(\0162&.GetR" + - "egionInfoResponse.CompactionState\"F\n\017Com" + - "pactionState\022\010\n\004NONE\020\000\022\t\n\005MINOR\020\001\022\t\n\005MAJ" + - "OR\020\002\022\023\n\017MAJOR_AND_MINOR\020\003\"G\n\023GetStoreFil" + - "eRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpecifi" + - "er\022\016\n\006family\030\002 \003(\014\")\n\024GetStoreFileRespon", - "se\022\021\n\tstoreFile\030\001 \003(\t\"\030\n\026GetOnlineRegion" + - "Request\":\n\027GetOnlineRegionResponse\022\037\n\nre" + - "gionInfo\030\001 \003(\0132\013.RegionInfo\"\225\001\n\021OpenRegi" + - "onRequest\0223\n\010openInfo\030\001 \003(\0132!.OpenRegion" + - "Request.RegionOpenInfo\032K\n\016RegionOpenInfo" + - "\022\033\n\006region\030\001 \002(\0132\013.RegionInfo\022\034\n\024version" + - "OfOfflineNode\030\002 \001(\r\"\234\001\n\022OpenRegionRespon" + - "se\022<\n\014openingState\030\001 \003(\0162&.OpenRegionRes" + - "ponse.RegionOpeningState\"H\n\022RegionOpenin" + - "gState\022\n\n\006OPENED\020\000\022\022\n\016ALREADY_OPENED\020\001\022\022", - "\n\016FAILED_OPENING\020\002\"\232\001\n\022CloseRegionReques" + - "t\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\034\n\024v" + - "ersionOfClosingNode\030\002 \001(\r\022\034\n\016transitionI" + - "nZK\030\003 \001(\010:\004true\022&\n\021destinationServer\030\004 \001" + - "(\0132\013.ServerName\"%\n\023CloseRegionResponse\022\016" + - "\n\006closed\030\001 \002(\010\"M\n\022FlushRegionRequest\022 \n\006" + - "region\030\001 \002(\0132\020.RegionSpecifier\022\025\n\rifOlde" + - "rThanTs\030\002 \001(\004\"=\n\023FlushRegionResponse\022\025\n\r" + - "lastFlushTime\030\001 \002(\004\022\017\n\007flushed\030\002 \001(\010\"J\n\022" + - "SplitRegionRequest\022 \n\006region\030\001 \002(\0132\020.Reg", - "ionSpecifier\022\022\n\nsplitPoint\030\002 \001(\014\"\025\n\023Spli" + - "tRegionResponse\"W\n\024CompactRegionRequest\022" + - " \n\006region\030\001 \002(\0132\020.RegionSpecifier\022\r\n\005maj" + - "or\030\002 \001(\010\022\016\n\006family\030\003 \001(\014\"\027\n\025CompactRegio" + - "nResponse\"t\n\023MergeRegionsRequest\022!\n\007regi" + - "onA\030\001 \002(\0132\020.RegionSpecifier\022!\n\007regionB\030\002" + - " \002(\0132\020.RegionSpecifier\022\027\n\010forcible\030\003 \001(\010" + - ":\005false\"\026\n\024MergeRegionsResponse\"1\n\004UUID\022" + - "\024\n\014leastSigBits\030\001 \002(\004\022\023\n\013mostSigBits\030\002 \002" + - "(\004\"\270\003\n\010WALEntry\022\035\n\003key\030\001 
\002(\0132\020.WALEntry.", - "WALKey\022\037\n\004edit\030\002 \002(\0132\021.WALEntry.WALEdit\032" + - "~\n\006WALKey\022\031\n\021encodedRegionName\030\001 \002(\014\022\021\n\t" + - "tableName\030\002 \002(\014\022\031\n\021logSequenceNumber\030\003 \002" + - "(\004\022\021\n\twriteTime\030\004 \002(\004\022\030\n\tclusterId\030\005 \001(\013" + - "2\005.UUID\032\353\001\n\007WALEdit\022\025\n\rkeyValueBytes\030\001 \003" + - "(\014\0222\n\013familyScope\030\002 \003(\0132\035.WALEntry.WALEd" + - "it.FamilyScope\032M\n\013FamilyScope\022\016\n\006family\030" + - "\001 \002(\014\022.\n\tscopeType\030\002 \002(\0162\033.WALEntry.WALE" + - "dit.ScopeType\"F\n\tScopeType\022\033\n\027REPLICATIO" + - "N_SCOPE_LOCAL\020\000\022\034\n\030REPLICATION_SCOPE_GLO", - "BAL\020\001\"4\n\030ReplicateWALEntryRequest\022\030\n\005ent" + - "ry\030\001 \003(\0132\t.WALEntry\"\033\n\031ReplicateWALEntry" + - "Response\"\026\n\024RollWALWriterRequest\".\n\025Roll" + - "WALWriterResponse\022\025\n\rregionToFlush\030\001 \003(\014" + - "\"#\n\021StopServerRequest\022\016\n\006reason\030\001 \002(\t\"\024\n" + - "\022StopServerResponse\"\026\n\024GetServerInfoRequ" + - "est\"@\n\nServerInfo\022\037\n\nserverName\030\001 \002(\0132\013." + - "ServerName\022\021\n\twebuiPort\030\002 \001(\r\"8\n\025GetServ" + - "erInfoResponse\022\037\n\nserverInfo\030\001 \002(\0132\013.Ser" + - "verInfo2\266\006\n\014AdminService\022>\n\rgetRegionInf", - "o\022\025.GetRegionInfoRequest\032\026.GetRegionInfo" + - "Response\022;\n\014getStoreFile\022\024.GetStoreFileR" + - "equest\032\025.GetStoreFileResponse\022D\n\017getOnli" + - "neRegion\022\027.GetOnlineRegionRequest\032\030.GetO" + - "nlineRegionResponse\0225\n\nopenRegion\022\022.Open" + - "RegionRequest\032\023.OpenRegionResponse\0228\n\013cl" + - "oseRegion\022\023.CloseRegionRequest\032\024.CloseRe" + - "gionResponse\0228\n\013flushRegion\022\023.FlushRegio" + - "nRequest\032\024.FlushRegionResponse\0228\n\013splitR" + - "egion\022\023.SplitRegionRequest\032\024.SplitRegion", - "Response\022>\n\rcompactRegion\022\025.CompactRegio" + - "nRequest\032\026.CompactRegionResponse\022;\n\014merg" + - "eRegions\022\024.MergeRegionsRequest\032\025.MergeRe" + - "gionsResponse\022J\n\021replicateWALEntry\022\031.Rep" + - "licateWALEntryRequest\032\032.ReplicateWALEntr" + - "yResponse\022>\n\rrollWALWriter\022\025.RollWALWrit" + - "erRequest\032\026.RollWALWriterResponse\022>\n\rget" + - "ServerInfo\022\025.GetServerInfoRequest\032\026.GetS" + - "erverInfoResponse\0225\n\nstopServer\022\022.StopSe" + - "rverRequest\032\023.StopServerResponseBA\n*org.", - "apache.hadoop.hbase.protobuf.generatedB\013" + - "AdminProtosH\001\210\001\001\240\001\001" + "\n\013Admin.proto\032\013hbase.proto\032\tWAL.proto\"Q\n" + + "\024GetRegionInfoRequest\022 \n\006region\030\001 \002(\0132\020." 
+ + "RegionSpecifier\022\027\n\017compactionState\030\002 \001(\010" + + "\"\301\001\n\025GetRegionInfoResponse\022\037\n\nregionInfo" + + "\030\001 \002(\0132\013.RegionInfo\022?\n\017compactionState\030\002" + + " \001(\0162&.GetRegionInfoResponse.CompactionS" + + "tate\"F\n\017CompactionState\022\010\n\004NONE\020\000\022\t\n\005MIN" + + "OR\020\001\022\t\n\005MAJOR\020\002\022\023\n\017MAJOR_AND_MINOR\020\003\"G\n\023" + + "GetStoreFileRequest\022 \n\006region\030\001 \002(\0132\020.Re" + + "gionSpecifier\022\016\n\006family\030\002 \003(\014\")\n\024GetStor", + "eFileResponse\022\021\n\tstoreFile\030\001 \003(\t\"\030\n\026GetO" + + "nlineRegionRequest\":\n\027GetOnlineRegionRes" + + "ponse\022\037\n\nregionInfo\030\001 \003(\0132\013.RegionInfo\"\225" + + "\001\n\021OpenRegionRequest\0223\n\010openInfo\030\001 \003(\0132!" + + ".OpenRegionRequest.RegionOpenInfo\032K\n\016Reg" + + "ionOpenInfo\022\033\n\006region\030\001 \002(\0132\013.RegionInfo" + + "\022\034\n\024versionOfOfflineNode\030\002 \001(\r\"\234\001\n\022OpenR" + + "egionResponse\022<\n\014openingState\030\001 \003(\0162&.Op" + + "enRegionResponse.RegionOpeningState\"H\n\022R" + + "egionOpeningState\022\n\n\006OPENED\020\000\022\022\n\016ALREADY", + "_OPENED\020\001\022\022\n\016FAILED_OPENING\020\002\"\232\001\n\022CloseR" + + "egionRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpe" + + "cifier\022\034\n\024versionOfClosingNode\030\002 \001(\r\022\034\n\016" + + "transitionInZK\030\003 \001(\010:\004true\022&\n\021destinatio" + + "nServer\030\004 \001(\0132\013.ServerName\"%\n\023CloseRegio" + + "nResponse\022\016\n\006closed\030\001 \002(\010\"M\n\022FlushRegion" + + "Request\022 \n\006region\030\001 \002(\0132\020.RegionSpecifie" + + "r\022\025\n\rifOlderThanTs\030\002 \001(\004\"=\n\023FlushRegionR" + + "esponse\022\025\n\rlastFlushTime\030\001 \002(\004\022\017\n\007flushe" + + "d\030\002 \001(\010\"J\n\022SplitRegionRequest\022 \n\006region\030", + "\001 \002(\0132\020.RegionSpecifier\022\022\n\nsplitPoint\030\002 " + + "\001(\014\"\025\n\023SplitRegionResponse\"W\n\024CompactReg" + + "ionRequest\022 \n\006region\030\001 \002(\0132\020.RegionSpeci" + + "fier\022\r\n\005major\030\002 \001(\010\022\016\n\006family\030\003 \001(\014\"\027\n\025C" + + "ompactRegionResponse\"t\n\023MergeRegionsRequ" + + "est\022!\n\007regionA\030\001 \002(\0132\020.RegionSpecifier\022!" 
+ + "\n\007regionB\030\002 \002(\0132\020.RegionSpecifier\022\027\n\010for" + + "cible\030\003 \001(\010:\005false\"\026\n\024MergeRegionsRespon" + + "se\"7\n\010WALEntry\022\024\n\003key\030\001 \002(\0132\007.WALKey\022\025\n\r" + + "keyValueBytes\030\002 \003(\014\"4\n\030ReplicateWALEntry", + "Request\022\030\n\005entry\030\001 \003(\0132\t.WALEntry\"\033\n\031Rep" + + "licateWALEntryResponse\"\026\n\024RollWALWriterR" + + "equest\".\n\025RollWALWriterResponse\022\025\n\rregio" + + "nToFlush\030\001 \003(\014\"#\n\021StopServerRequest\022\016\n\006r" + + "eason\030\001 \002(\t\"\024\n\022StopServerResponse\"\026\n\024Get" + + "ServerInfoRequest\"@\n\nServerInfo\022\037\n\nserve" + + "rName\030\001 \002(\0132\013.ServerName\022\021\n\twebuiPort\030\002 " + + "\001(\r\"8\n\025GetServerInfoResponse\022\037\n\nserverIn" + + "fo\030\001 \002(\0132\013.ServerInfo2\266\006\n\014AdminService\022>" + + "\n\rgetRegionInfo\022\025.GetRegionInfoRequest\032\026", + ".GetRegionInfoResponse\022;\n\014getStoreFile\022\024" + + ".GetStoreFileRequest\032\025.GetStoreFileRespo" + + "nse\022D\n\017getOnlineRegion\022\027.GetOnlineRegion" + + "Request\032\030.GetOnlineRegionResponse\0225\n\nope" + + "nRegion\022\022.OpenRegionRequest\032\023.OpenRegion" + + "Response\0228\n\013closeRegion\022\023.CloseRegionReq" + + "uest\032\024.CloseRegionResponse\0228\n\013flushRegio" + + "n\022\023.FlushRegionRequest\032\024.FlushRegionResp" + + "onse\0228\n\013splitRegion\022\023.SplitRegionRequest" + + "\032\024.SplitRegionResponse\022>\n\rcompactRegion\022", + "\025.CompactRegionRequest\032\026.CompactRegionRe" + + "sponse\022;\n\014mergeRegions\022\024.MergeRegionsReq" + + "uest\032\025.MergeRegionsResponse\022J\n\021replicate" + + "WALEntry\022\031.ReplicateWALEntryRequest\032\032.Re" + + "plicateWALEntryResponse\022>\n\rrollWALWriter" + + "\022\025.RollWALWriterRequest\032\026.RollWALWriterR" + + "esponse\022>\n\rgetServerInfo\022\025.GetServerInfo" + + "Request\032\026.GetServerInfoResponse\0225\n\nstopS" + + "erver\022\022.StopServerRequest\032\023.StopServerRe" + + "sponseBA\n*org.apache.hadoop.hbase.protob", + "uf.generatedB\013AdminProtosH\001\210\001\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -17743,48 +15193,16 @@ public final class AdminProtos { new java.lang.String[] { }, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.MergeRegionsResponse.Builder.class); - internal_static_UUID_descriptor = - getDescriptor().getMessageTypes().get(18); - internal_static_UUID_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_UUID_descriptor, - new java.lang.String[] { "LeastSigBits", "MostSigBits", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UUID.Builder.class); internal_static_WALEntry_descriptor = - getDescriptor().getMessageTypes().get(19); + getDescriptor().getMessageTypes().get(18); internal_static_WALEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_WALEntry_descriptor, - new java.lang.String[] { "Key", "Edit", }, + new java.lang.String[] { "Key", "KeyValueBytes", }, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.class, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.Builder.class); - internal_static_WALEntry_WALKey_descriptor = - internal_static_WALEntry_descriptor.getNestedTypes().get(0); - internal_static_WALEntry_WALKey_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WALEntry_WALKey_descriptor, - new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALKey.Builder.class); - internal_static_WALEntry_WALEdit_descriptor = - internal_static_WALEntry_descriptor.getNestedTypes().get(1); - internal_static_WALEntry_WALEdit_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WALEntry_WALEdit_descriptor, - new java.lang.String[] { "KeyValueBytes", "FamilyScope", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.Builder.class); - internal_static_WALEntry_WALEdit_FamilyScope_descriptor = - internal_static_WALEntry_WALEdit_descriptor.getNestedTypes().get(0); - internal_static_WALEntry_WALEdit_FamilyScope_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_WALEntry_WALEdit_FamilyScope_descriptor, - new java.lang.String[] { "Family", "ScopeType", }, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.class, - org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry.WALEdit.FamilyScope.Builder.class); internal_static_ReplicateWALEntryRequest_descriptor = - getDescriptor().getMessageTypes().get(20); + getDescriptor().getMessageTypes().get(19); internal_static_ReplicateWALEntryRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReplicateWALEntryRequest_descriptor, @@ -17792,7 +15210,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryRequest.Builder.class); internal_static_ReplicateWALEntryResponse_descriptor = - getDescriptor().getMessageTypes().get(21); + getDescriptor().getMessageTypes().get(20); internal_static_ReplicateWALEntryResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ReplicateWALEntryResponse_descriptor, @@ -17800,7 +15218,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ReplicateWALEntryResponse.Builder.class); internal_static_RollWALWriterRequest_descriptor = - getDescriptor().getMessageTypes().get(22); + getDescriptor().getMessageTypes().get(21); internal_static_RollWALWriterRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RollWALWriterRequest_descriptor, @@ -17808,7 +15226,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest.Builder.class); internal_static_RollWALWriterResponse_descriptor = - getDescriptor().getMessageTypes().get(23); + getDescriptor().getMessageTypes().get(22); 
internal_static_RollWALWriterResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_RollWALWriterResponse_descriptor, @@ -17816,7 +15234,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse.Builder.class); internal_static_StopServerRequest_descriptor = - getDescriptor().getMessageTypes().get(24); + getDescriptor().getMessageTypes().get(23); internal_static_StopServerRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopServerRequest_descriptor, @@ -17824,7 +15242,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest.Builder.class); internal_static_StopServerResponse_descriptor = - getDescriptor().getMessageTypes().get(25); + getDescriptor().getMessageTypes().get(24); internal_static_StopServerResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_StopServerResponse_descriptor, @@ -17832,7 +15250,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerResponse.Builder.class); internal_static_GetServerInfoRequest_descriptor = - getDescriptor().getMessageTypes().get(26); + getDescriptor().getMessageTypes().get(25); internal_static_GetServerInfoRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetServerInfoRequest_descriptor, @@ -17840,7 +15258,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetServerInfoRequest.Builder.class); internal_static_ServerInfo_descriptor = - getDescriptor().getMessageTypes().get(27); + getDescriptor().getMessageTypes().get(26); internal_static_ServerInfo_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_ServerInfo_descriptor, @@ -17848,7 +15266,7 @@ public final class AdminProtos { org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.ServerInfo.Builder.class); internal_static_GetServerInfoResponse_descriptor = - getDescriptor().getMessageTypes().get(28); + getDescriptor().getMessageTypes().get(27); internal_static_GetServerInfoResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessage.FieldAccessorTable( internal_static_GetServerInfoResponse_descriptor, @@ -17862,6 +15280,7 @@ public final class AdminProtos { .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + org.apache.hadoop.hbase.protobuf.generated.WALProtos.getDescriptor(), }, assigner); } diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java index 23a4f4ffbed..d25caef2198 100644 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/HBaseProtos.java @@ -13545,6 
+13545,459 @@ public final class HBaseProtos { // @@protoc_insertion_point(class_scope:BigDecimalMsg) } + public interface UUIDOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 leastSigBits = 1; + boolean hasLeastSigBits(); + long getLeastSigBits(); + + // required uint64 mostSigBits = 2; + boolean hasMostSigBits(); + long getMostSigBits(); + } + public static final class UUID extends + com.google.protobuf.GeneratedMessage + implements UUIDOrBuilder { + // Use UUID.newBuilder() to construct. + private UUID(Builder builder) { + super(builder); + } + private UUID(boolean noInit) {} + + private static final UUID defaultInstance; + public static UUID getDefaultInstance() { + return defaultInstance; + } + + public UUID getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_UUID_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_UUID_fieldAccessorTable; + } + + private int bitField0_; + // required uint64 leastSigBits = 1; + public static final int LEASTSIGBITS_FIELD_NUMBER = 1; + private long leastSigBits_; + public boolean hasLeastSigBits() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLeastSigBits() { + return leastSigBits_; + } + + // required uint64 mostSigBits = 2; + public static final int MOSTSIGBITS_FIELD_NUMBER = 2; + private long mostSigBits_; + public boolean hasMostSigBits() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getMostSigBits() { + return mostSigBits_; + } + + private void initFields() { + leastSigBits_ = 0L; + mostSigBits_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasLeastSigBits()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasMostSigBits()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, leastSigBits_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, mostSigBits_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, leastSigBits_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, mostSigBits_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID other = (org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID) obj; + + boolean result = true; + result = result && (hasLeastSigBits() == other.hasLeastSigBits()); + if (hasLeastSigBits()) { + result = result && (getLeastSigBits() + == other.getLeastSigBits()); + } + result = result && (hasMostSigBits() == other.hasMostSigBits()); + if (hasMostSigBits()) { + result = result && (getMostSigBits() + == other.getMostSigBits()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasLeastSigBits()) { + hash = (37 * hash) + LEASTSIGBITS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLeastSigBits()); + } + if (hasMostSigBits()) { + hash = (37 * hash) + MOSTSIGBITS_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getMostSigBits()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( + com.google.protobuf.CodedInputStream 
input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_UUID_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.internal_static_UUID_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + leastSigBits_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + mostSigBits_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID build() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID result = new org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + 
if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.leastSigBits_ = leastSigBits_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.mostSigBits_ = mostSigBits_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()) return this; + if (other.hasLeastSigBits()) { + setLeastSigBits(other.getLeastSigBits()); + } + if (other.hasMostSigBits()) { + setMostSigBits(other.getMostSigBits()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasLeastSigBits()) { + + return false; + } + if (!hasMostSigBits()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + leastSigBits_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + mostSigBits_ = input.readUInt64(); + break; + } + } + } + } + + private int bitField0_; + + // required uint64 leastSigBits = 1; + private long leastSigBits_ ; + public boolean hasLeastSigBits() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public long getLeastSigBits() { + return leastSigBits_; + } + public Builder setLeastSigBits(long value) { + bitField0_ |= 0x00000001; + leastSigBits_ = value; + onChanged(); + return this; + } + public Builder clearLeastSigBits() { + bitField0_ = (bitField0_ & ~0x00000001); + leastSigBits_ = 0L; + onChanged(); + return this; + } + + // required uint64 mostSigBits = 2; + private long mostSigBits_ ; + public boolean hasMostSigBits() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public long getMostSigBits() { + return mostSigBits_; + } + public Builder setMostSigBits(long value) { + bitField0_ |= 0x00000002; + mostSigBits_ = value; + onChanged(); + return this; + } + public Builder clearMostSigBits() { + bitField0_ = (bitField0_ & ~0x00000002); + mostSigBits_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:UUID) + } + + static { + defaultInstance = new UUID(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:UUID) + } + private static com.google.protobuf.Descriptors.Descriptor internal_static_Cell_descriptor; private static @@ -13645,6 +14098,11 @@ public final class HBaseProtos { private static com.google.protobuf.GeneratedMessage.FieldAccessorTable 
internal_static_BigDecimalMsg_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_UUID_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_UUID_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { @@ -13706,14 +14164,15 @@ public final class HBaseProtos { "n.Type:\005FLUSH\022\017\n\007version\030\005 \001(\005\"\037\n\004Type\022\014", "\n\010DISABLED\020\000\022\t\n\005FLUSH\020\001\"\n\n\010EmptyMsg\"\032\n\007L" + "ongMsg\022\017\n\007longMsg\030\001 \002(\003\"&\n\rBigDecimalMsg" + - "\022\025\n\rbigdecimalMsg\030\001 \002(\014*`\n\010CellType\022\013\n\007M" + - "INIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010\022\021\n\rDELETE_" + - "COLUMN\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014\n\007MAXIMUM\020\377" + - "\001*r\n\013CompareType\022\010\n\004LESS\020\000\022\021\n\rLESS_OR_EQ" + - "UAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020GREAT" + - "ER_OR_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n\005NO_OP\020\006B>\n" + - "*org.apache.hadoop.hbase.protobuf.genera" + - "tedB\013HBaseProtosH\001\240\001\001" + "\022\025\n\rbigdecimalMsg\030\001 \002(\014\"1\n\004UUID\022\024\n\014least" + + "SigBits\030\001 \002(\004\022\023\n\013mostSigBits\030\002 \002(\004*`\n\010Ce" + + "llType\022\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006DELETE\020\010" + + "\022\021\n\rDELETE_COLUMN\020\014\022\021\n\rDELETE_FAMILY\020\016\022\014" + + "\n\007MAXIMUM\020\377\001*r\n\013CompareType\022\010\n\004LESS\020\000\022\021\n" + + "\rLESS_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL" + + "\020\003\022\024\n\020GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n" + + "\005NO_OP\020\006B>\n*org.apache.hadoop.hbase.prot", + "obuf.generatedB\013HBaseProtosH\001\240\001\001" }; com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { @@ -13880,6 +14339,14 @@ public final class HBaseProtos { new java.lang.String[] { "BigdecimalMsg", }, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.class, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BigDecimalMsg.Builder.class); + internal_static_UUID_descriptor = + getDescriptor().getMessageTypes().get(20); + internal_static_UUID_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_UUID_descriptor, + new java.lang.String[] { "LeastSigBits", "MostSigBits", }, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.class, + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder.class); return null; } }; diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WAL.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WAL.java deleted file mode 100644 index 020debdaa2f..00000000000 --- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WAL.java +++ /dev/null @@ -1,938 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
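With this patch the UUID message is registered in HBaseProtos (as message type 20 above) rather than in AdminProtos, so it can be shared across descriptor files; the hunk that follows deletes the old generated WAL outer class, which is regenerated as WALProtos later in the patch. As a rough sketch only — the UuidConversion helper below is hypothetical and not part of the patch — bridging java.util.UUID and the relocated message needs nothing beyond the generated accessors shown above:

import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;

// Hypothetical helper (not part of this patch): converts between
// java.util.UUID and the relocated HBaseProtos.UUID message.
public final class UuidConversion {
  public static HBaseProtos.UUID toProto(java.util.UUID uuid) {
    return HBaseProtos.UUID.newBuilder()
        .setLeastSigBits(uuid.getLeastSignificantBits()) // required uint64 leastSigBits = 1
        .setMostSigBits(uuid.getMostSignificantBits())   // required uint64 mostSigBits = 2
        .build();
  }

  public static java.util.UUID fromProto(HBaseProtos.UUID proto) {
    // java.util.UUID takes (mostSigBits, leastSigBits), mirroring the two-long split above.
    return new java.util.UUID(proto.getMostSigBits(), proto.getLeastSigBits());
  }
}

The two-field layout mirrors java.util.UUID's internal pair of longs, so the round trip is lossless.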
-// source: WAL.proto - -package org.apache.hadoop.hbase.protobuf.generated; - -public final class WAL { - private WAL() {} - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - } - public interface CompactionDescriptorOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required bytes tableName = 1; - boolean hasTableName(); - com.google.protobuf.ByteString getTableName(); - - // required bytes encodedRegionName = 2; - boolean hasEncodedRegionName(); - com.google.protobuf.ByteString getEncodedRegionName(); - - // required bytes familyName = 3; - boolean hasFamilyName(); - com.google.protobuf.ByteString getFamilyName(); - - // repeated string compactionInput = 4; - java.util.List getCompactionInputList(); - int getCompactionInputCount(); - String getCompactionInput(int index); - - // repeated string compactionOutput = 5; - java.util.List getCompactionOutputList(); - int getCompactionOutputCount(); - String getCompactionOutput(int index); - - // required string storeHomeDir = 6; - boolean hasStoreHomeDir(); - String getStoreHomeDir(); - } - public static final class CompactionDescriptor extends - com.google.protobuf.GeneratedMessage - implements CompactionDescriptorOrBuilder { - // Use CompactionDescriptor.newBuilder() to construct. - private CompactionDescriptor(Builder builder) { - super(builder); - } - private CompactionDescriptor(boolean noInit) {} - - private static final CompactionDescriptor defaultInstance; - public static CompactionDescriptor getDefaultInstance() { - return defaultInstance; - } - - public CompactionDescriptor getDefaultInstanceForType() { - return defaultInstance; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.WAL.internal_static_CompactionDescriptor_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.WAL.internal_static_CompactionDescriptor_fieldAccessorTable; - } - - private int bitField0_; - // required bytes tableName = 1; - public static final int TABLENAME_FIELD_NUMBER = 1; - private com.google.protobuf.ByteString tableName_; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - - // required bytes encodedRegionName = 2; - public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 2; - private com.google.protobuf.ByteString encodedRegionName_; - public boolean hasEncodedRegionName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getEncodedRegionName() { - return encodedRegionName_; - } - - // required bytes familyName = 3; - public static final int FAMILYNAME_FIELD_NUMBER = 3; - private com.google.protobuf.ByteString familyName_; - public boolean hasFamilyName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getFamilyName() { - return familyName_; - } - - // repeated string compactionInput = 4; - public static final int COMPACTIONINPUT_FIELD_NUMBER = 4; - private com.google.protobuf.LazyStringList compactionInput_; - public java.util.List - getCompactionInputList() { - return compactionInput_; - } - public int getCompactionInputCount() { - return compactionInput_.size(); - } - public String getCompactionInput(int index) { - return compactionInput_.get(index); - } - - // 
repeated string compactionOutput = 5; - public static final int COMPACTIONOUTPUT_FIELD_NUMBER = 5; - private com.google.protobuf.LazyStringList compactionOutput_; - public java.util.List - getCompactionOutputList() { - return compactionOutput_; - } - public int getCompactionOutputCount() { - return compactionOutput_.size(); - } - public String getCompactionOutput(int index) { - return compactionOutput_.get(index); - } - - // required string storeHomeDir = 6; - public static final int STOREHOMEDIR_FIELD_NUMBER = 6; - private java.lang.Object storeHomeDir_; - public boolean hasStoreHomeDir() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - public String getStoreHomeDir() { - java.lang.Object ref = storeHomeDir_; - if (ref instanceof String) { - return (String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - String s = bs.toStringUtf8(); - if (com.google.protobuf.Internal.isValidUtf8(bs)) { - storeHomeDir_ = s; - } - return s; - } - } - private com.google.protobuf.ByteString getStoreHomeDirBytes() { - java.lang.Object ref = storeHomeDir_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8((String) ref); - storeHomeDir_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - private void initFields() { - tableName_ = com.google.protobuf.ByteString.EMPTY; - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - familyName_ = com.google.protobuf.ByteString.EMPTY; - compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - storeHomeDir_ = ""; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasTableName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasEncodedRegionName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasFamilyName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasStoreHomeDir()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, tableName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, encodedRegionName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, familyName_); - } - for (int i = 0; i < compactionInput_.size(); i++) { - output.writeBytes(4, compactionInput_.getByteString(i)); - } - for (int i = 0; i < compactionOutput_.size(); i++) { - output.writeBytes(5, compactionOutput_.getByteString(i)); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeBytes(6, getStoreHomeDirBytes()); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, tableName_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, encodedRegionName_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += 
com.google.protobuf.CodedOutputStream - .computeBytesSize(3, familyName_); - } - { - int dataSize = 0; - for (int i = 0; i < compactionInput_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(compactionInput_.getByteString(i)); - } - size += dataSize; - size += 1 * getCompactionInputList().size(); - } - { - int dataSize = 0; - for (int i = 0; i < compactionOutput_.size(); i++) { - dataSize += com.google.protobuf.CodedOutputStream - .computeBytesSizeNoTag(compactionOutput_.getByteString(i)); - } - size += dataSize; - size += 1 * getCompactionOutputList().size(); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(6, getStoreHomeDirBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor)) { - return super.equals(obj); - } - org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor) obj; - - boolean result = true; - result = result && (hasTableName() == other.hasTableName()); - if (hasTableName()) { - result = result && getTableName() - .equals(other.getTableName()); - } - result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); - if (hasEncodedRegionName()) { - result = result && getEncodedRegionName() - .equals(other.getEncodedRegionName()); - } - result = result && (hasFamilyName() == other.hasFamilyName()); - if (hasFamilyName()) { - result = result && getFamilyName() - .equals(other.getFamilyName()); - } - result = result && getCompactionInputList() - .equals(other.getCompactionInputList()); - result = result && getCompactionOutputList() - .equals(other.getCompactionOutputList()); - result = result && (hasStoreHomeDir() == other.hasStoreHomeDir()); - if (hasStoreHomeDir()) { - result = result && getStoreHomeDir() - .equals(other.getStoreHomeDir()); - } - result = result && - getUnknownFields().equals(other.getUnknownFields()); - return result; - } - - @java.lang.Override - public int hashCode() { - int hash = 41; - hash = (19 * hash) + getDescriptorForType().hashCode(); - if (hasTableName()) { - hash = (37 * hash) + TABLENAME_FIELD_NUMBER; - hash = (53 * hash) + getTableName().hashCode(); - } - if (hasEncodedRegionName()) { - hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER; - hash = (53 * hash) + getEncodedRegionName().hashCode(); - } - if (hasFamilyName()) { - hash = (37 * hash) + FAMILYNAME_FIELD_NUMBER; - hash = (53 * hash) + getFamilyName().hashCode(); - } - if (getCompactionInputCount() > 0) { - hash = (37 * hash) + COMPACTIONINPUT_FIELD_NUMBER; - hash = (53 * hash) + getCompactionInputList().hashCode(); - } - if (getCompactionOutputCount() > 0) { - hash = (37 * hash) + COMPACTIONOUTPUT_FIELD_NUMBER; - hash = (53 * hash) + getCompactionOutputList().hashCode(); - } - if (hasStoreHomeDir()) { - hash = (37 * hash) + STOREHOMEDIR_FIELD_NUMBER; - hash = (53 * hash) + getStoreHomeDir().hashCode(); - } - hash = (29 * hash) + getUnknownFields().hashCode(); - return hash; - } - - public static 
org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return newBuilder().mergeFrom(data, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor parseFrom(java.io.InputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - Builder builder = newBuilder(); - if (builder.mergeDelimitedFrom(input, extensionRegistry)) { - return builder.buildParsed(); - } else { - return null; - } - } - public static org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return newBuilder().mergeFrom(input).buildParsed(); - } - public static org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return newBuilder().mergeFrom(input, extensionRegistry) - .buildParsed(); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder - implements 
org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptorOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.apache.hadoop.hbase.protobuf.generated.WAL.internal_static_CompactionDescriptor_descriptor; - } - - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.apache.hadoop.hbase.protobuf.generated.WAL.internal_static_CompactionDescriptor_fieldAccessorTable; - } - - // Construct using org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder(BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - tableName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000002); - familyName_ = com.google.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000004); - compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000008); - compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000010); - storeHomeDir_ = ""; - bitField0_ = (bitField0_ & ~0x00000020); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor.getDescriptor(); - } - - public org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor getDefaultInstanceForType() { - return org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor.getDefaultInstance(); - } - - public org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor build() { - org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - private org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor buildParsed() - throws com.google.protobuf.InvalidProtocolBufferException { - org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException( - result).asInvalidProtocolBufferException(); - } - return result; - } - - public org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor buildPartial() { - org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.tableName_ = tableName_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.encodedRegionName_ = encodedRegionName_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.familyName_ = familyName_; - if (((bitField0_ & 0x00000008) == 0x00000008)) { - compactionInput_ = new 
com.google.protobuf.UnmodifiableLazyStringList( - compactionInput_); - bitField0_ = (bitField0_ & ~0x00000008); - } - result.compactionInput_ = compactionInput_; - if (((bitField0_ & 0x00000010) == 0x00000010)) { - compactionOutput_ = new com.google.protobuf.UnmodifiableLazyStringList( - compactionOutput_); - bitField0_ = (bitField0_ & ~0x00000010); - } - result.compactionOutput_ = compactionOutput_; - if (((from_bitField0_ & 0x00000020) == 0x00000020)) { - to_bitField0_ |= 0x00000008; - } - result.storeHomeDir_ = storeHomeDir_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor) { - return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor other) { - if (other == org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor.getDefaultInstance()) return this; - if (other.hasTableName()) { - setTableName(other.getTableName()); - } - if (other.hasEncodedRegionName()) { - setEncodedRegionName(other.getEncodedRegionName()); - } - if (other.hasFamilyName()) { - setFamilyName(other.getFamilyName()); - } - if (!other.compactionInput_.isEmpty()) { - if (compactionInput_.isEmpty()) { - compactionInput_ = other.compactionInput_; - bitField0_ = (bitField0_ & ~0x00000008); - } else { - ensureCompactionInputIsMutable(); - compactionInput_.addAll(other.compactionInput_); - } - onChanged(); - } - if (!other.compactionOutput_.isEmpty()) { - if (compactionOutput_.isEmpty()) { - compactionOutput_ = other.compactionOutput_; - bitField0_ = (bitField0_ & ~0x00000010); - } else { - ensureCompactionOutputIsMutable(); - compactionOutput_.addAll(other.compactionOutput_); - } - onChanged(); - } - if (other.hasStoreHomeDir()) { - setStoreHomeDir(other.getStoreHomeDir()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasTableName()) { - - return false; - } - if (!hasEncodedRegionName()) { - - return false; - } - if (!hasFamilyName()) { - - return false; - } - if (!hasStoreHomeDir()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder( - this.getUnknownFields()); - while (true) { - int tag = input.readTag(); - switch (tag) { - case 0: - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - this.setUnknownFields(unknownFields.build()); - onChanged(); - return this; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - tableName_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - encodedRegionName_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - familyName_ = input.readBytes(); - break; - } - case 34: { - ensureCompactionInputIsMutable(); - compactionInput_.add(input.readBytes()); - break; - } - case 42: { - ensureCompactionOutputIsMutable(); - compactionOutput_.add(input.readBytes()); - break; - } - case 50: { - bitField0_ |= 
0x00000020; - storeHomeDir_ = input.readBytes(); - break; - } - } - } - } - - private int bitField0_; - - // required bytes tableName = 1; - private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasTableName() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - public com.google.protobuf.ByteString getTableName() { - return tableName_; - } - public Builder setTableName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - tableName_ = value; - onChanged(); - return this; - } - public Builder clearTableName() { - bitField0_ = (bitField0_ & ~0x00000001); - tableName_ = getDefaultInstance().getTableName(); - onChanged(); - return this; - } - - // required bytes encodedRegionName = 2; - private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasEncodedRegionName() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - public com.google.protobuf.ByteString getEncodedRegionName() { - return encodedRegionName_; - } - public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - encodedRegionName_ = value; - onChanged(); - return this; - } - public Builder clearEncodedRegionName() { - bitField0_ = (bitField0_ & ~0x00000002); - encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); - onChanged(); - return this; - } - - // required bytes familyName = 3; - private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; - public boolean hasFamilyName() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - public com.google.protobuf.ByteString getFamilyName() { - return familyName_; - } - public Builder setFamilyName(com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - familyName_ = value; - onChanged(); - return this; - } - public Builder clearFamilyName() { - bitField0_ = (bitField0_ & ~0x00000004); - familyName_ = getDefaultInstance().getFamilyName(); - onChanged(); - return this; - } - - // repeated string compactionInput = 4; - private com.google.protobuf.LazyStringList compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureCompactionInputIsMutable() { - if (!((bitField0_ & 0x00000008) == 0x00000008)) { - compactionInput_ = new com.google.protobuf.LazyStringArrayList(compactionInput_); - bitField0_ |= 0x00000008; - } - } - public java.util.List - getCompactionInputList() { - return java.util.Collections.unmodifiableList(compactionInput_); - } - public int getCompactionInputCount() { - return compactionInput_.size(); - } - public String getCompactionInput(int index) { - return compactionInput_.get(index); - } - public Builder setCompactionInput( - int index, String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureCompactionInputIsMutable(); - compactionInput_.set(index, value); - onChanged(); - return this; - } - public Builder addCompactionInput(String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureCompactionInputIsMutable(); - compactionInput_.add(value); - onChanged(); - return this; - } - public Builder addAllCompactionInput( - java.lang.Iterable values) { - ensureCompactionInputIsMutable(); - super.addAll(values, compactionInput_); - onChanged(); - return this; - } - 
public Builder clearCompactionInput() { - compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000008); - onChanged(); - return this; - } - void addCompactionInput(com.google.protobuf.ByteString value) { - ensureCompactionInputIsMutable(); - compactionInput_.add(value); - onChanged(); - } - - // repeated string compactionOutput = 5; - private com.google.protobuf.LazyStringList compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - private void ensureCompactionOutputIsMutable() { - if (!((bitField0_ & 0x00000010) == 0x00000010)) { - compactionOutput_ = new com.google.protobuf.LazyStringArrayList(compactionOutput_); - bitField0_ |= 0x00000010; - } - } - public java.util.List - getCompactionOutputList() { - return java.util.Collections.unmodifiableList(compactionOutput_); - } - public int getCompactionOutputCount() { - return compactionOutput_.size(); - } - public String getCompactionOutput(int index) { - return compactionOutput_.get(index); - } - public Builder setCompactionOutput( - int index, String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureCompactionOutputIsMutable(); - compactionOutput_.set(index, value); - onChanged(); - return this; - } - public Builder addCompactionOutput(String value) { - if (value == null) { - throw new NullPointerException(); - } - ensureCompactionOutputIsMutable(); - compactionOutput_.add(value); - onChanged(); - return this; - } - public Builder addAllCompactionOutput( - java.lang.Iterable values) { - ensureCompactionOutputIsMutable(); - super.addAll(values, compactionOutput_); - onChanged(); - return this; - } - public Builder clearCompactionOutput() { - compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; - bitField0_ = (bitField0_ & ~0x00000010); - onChanged(); - return this; - } - void addCompactionOutput(com.google.protobuf.ByteString value) { - ensureCompactionOutputIsMutable(); - compactionOutput_.add(value); - onChanged(); - } - - // required string storeHomeDir = 6; - private java.lang.Object storeHomeDir_ = ""; - public boolean hasStoreHomeDir() { - return ((bitField0_ & 0x00000020) == 0x00000020); - } - public String getStoreHomeDir() { - java.lang.Object ref = storeHomeDir_; - if (!(ref instanceof String)) { - String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); - storeHomeDir_ = s; - return s; - } else { - return (String) ref; - } - } - public Builder setStoreHomeDir(String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000020; - storeHomeDir_ = value; - onChanged(); - return this; - } - public Builder clearStoreHomeDir() { - bitField0_ = (bitField0_ & ~0x00000020); - storeHomeDir_ = getDefaultInstance().getStoreHomeDir(); - onChanged(); - return this; - } - void setStoreHomeDir(com.google.protobuf.ByteString value) { - bitField0_ |= 0x00000020; - storeHomeDir_ = value; - onChanged(); - } - - // @@protoc_insertion_point(builder_scope:CompactionDescriptor) - } - - static { - defaultInstance = new CompactionDescriptor(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:CompactionDescriptor) - } - - private static com.google.protobuf.Descriptors.Descriptor - internal_static_CompactionDescriptor_descriptor; - private static - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_CompactionDescriptor_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - 
private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\tWAL.proto\032\013hbase.proto\"\241\001\n\024CompactionD" + - "escriptor\022\021\n\ttableName\030\001 \002(\014\022\031\n\021encodedR" + - "egionName\030\002 \002(\014\022\022\n\nfamilyName\030\003 \002(\014\022\027\n\017c" + - "ompactionInput\030\004 \003(\t\022\030\n\020compactionOutput" + - "\030\005 \003(\t\022\024\n\014storeHomeDir\030\006 \002(\tB6\n*org.apac" + - "he.hadoop.hbase.protobuf.generatedB\003WALH" + - "\001\240\001\001" - }; - com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public com.google.protobuf.ExtensionRegistry assignDescriptors( - com.google.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - internal_static_CompactionDescriptor_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_CompactionDescriptor_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_CompactionDescriptor_descriptor, - new java.lang.String[] { "TableName", "EncodedRegionName", "FamilyName", "CompactionInput", "CompactionOutput", "StoreHomeDir", }, - org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor.class, - org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor.Builder.class); - return null; - } - }; - com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java new file mode 100644 index 00000000000..d5e9d844168 --- /dev/null +++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/WALProtos.java @@ -0,0 +1,3019 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
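The replacement WALProtos.java, generated from the same WAL.proto, starts here. The portion visible in this hunk adds a ScopeType enum (REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL) and a WALHeader message whose optional hasCompression flag presumably records whether the log was written with WAL compression. A minimal round-trip sketch, assuming the standard generated builder setter alongside the accessors shown below (the WALHeaderRoundTrip class itself is hypothetical and not part of the patch):

import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader;

// Hypothetical example: build, serialize, and re-read a WALHeader.
public class WALHeaderRoundTrip {
  public static void main(String[] args) throws Exception {
    // optional bool hasCompression = 1; setter name assumes the usual protobuf codegen.
    WALHeader header = WALHeader.newBuilder()
        .setHasCompression(true)
        .build();

    byte[] serialized = header.toByteArray();            // standard protobuf serialization
    WALHeader reread = WALHeader.parseFrom(serialized);   // parseFrom(byte[]) as generated below

    System.out.println(reread.hasHasCompression());       // true
    System.out.println(reread.getHasCompression());       // true
  }
}

Because the field is optional and initFields() defaults it to false, a header written without the flag simply reads back as getHasCompression() == false.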
+// source: WAL.proto + +package org.apache.hadoop.hbase.protobuf.generated; + +public final class WALProtos { + private WALProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public enum ScopeType + implements com.google.protobuf.ProtocolMessageEnum { + REPLICATION_SCOPE_LOCAL(0, 0), + REPLICATION_SCOPE_GLOBAL(1, 1), + ; + + public static final int REPLICATION_SCOPE_LOCAL_VALUE = 0; + public static final int REPLICATION_SCOPE_GLOBAL_VALUE = 1; + + + public final int getNumber() { return value; } + + public static ScopeType valueOf(int value) { + switch (value) { + case 0: return REPLICATION_SCOPE_LOCAL; + case 1: return REPLICATION_SCOPE_GLOBAL; + default: return null; + } + } + + public static com.google.protobuf.Internal.EnumLiteMap + internalGetValueMap() { + return internalValueMap; + } + private static com.google.protobuf.Internal.EnumLiteMap + internalValueMap = + new com.google.protobuf.Internal.EnumLiteMap() { + public ScopeType findValueByNumber(int number) { + return ScopeType.valueOf(number); + } + }; + + public final com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(index); + } + public final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.getDescriptor().getEnumTypes().get(0); + } + + private static final ScopeType[] VALUES = { + REPLICATION_SCOPE_LOCAL, REPLICATION_SCOPE_GLOBAL, + }; + + public static ScopeType valueOf( + com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int index; + private final int value; + + private ScopeType(int index, int value) { + this.index = index; + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:ScopeType) + } + + public interface WALHeaderOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional bool hasCompression = 1; + boolean hasHasCompression(); + boolean getHasCompression(); + } + public static final class WALHeader extends + com.google.protobuf.GeneratedMessage + implements WALHeaderOrBuilder { + // Use WALHeader.newBuilder() to construct. 
+ private WALHeader(Builder builder) { + super(builder); + } + private WALHeader(boolean noInit) {} + + private static final WALHeader defaultInstance; + public static WALHeader getDefaultInstance() { + return defaultInstance; + } + + public WALHeader getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_fieldAccessorTable; + } + + private int bitField0_; + // optional bool hasCompression = 1; + public static final int HASCOMPRESSION_FIELD_NUMBER = 1; + private boolean hasCompression_; + public boolean hasHasCompression() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getHasCompression() { + return hasCompression_; + } + + private void initFields() { + hasCompression_ = false; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBool(1, hasCompression_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(1, hasCompression_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) obj; + + boolean result = true; + result = result && (hasHasCompression() == other.hasHasCompression()); + if (hasHasCompression()) { + result = result && (getHasCompression() + == other.getHasCompression()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasHasCompression()) { + hash = (37 * hash) + HASCOMPRESSION_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getHasCompression()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeaderOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + 
internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALHeader_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + hasCompression_ = false; + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader build() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.hasCompression_ = hasCompression_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.getDefaultInstance()) return this; + if (other.hasHasCompression()) { + setHasCompression(other.getHasCompression()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + 
return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + hasCompression_ = input.readBool(); + break; + } + } + } + } + + private int bitField0_; + + // optional bool hasCompression = 1; + private boolean hasCompression_ ; + public boolean hasHasCompression() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public boolean getHasCompression() { + return hasCompression_; + } + public Builder setHasCompression(boolean value) { + bitField0_ |= 0x00000001; + hasCompression_ = value; + onChanged(); + return this; + } + public Builder clearHasCompression() { + bitField0_ = (bitField0_ & ~0x00000001); + hasCompression_ = false; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:WALHeader) + } + + static { + defaultInstance = new WALHeader(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALHeader) + } + + public interface WALKeyOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes encodedRegionName = 1; + boolean hasEncodedRegionName(); + com.google.protobuf.ByteString getEncodedRegionName(); + + // required bytes tableName = 2; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required uint64 logSequenceNumber = 3; + boolean hasLogSequenceNumber(); + long getLogSequenceNumber(); + + // required uint64 writeTime = 4; + boolean hasWriteTime(); + long getWriteTime(); + + // optional .UUID clusterId = 5; + boolean hasClusterId(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId(); + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder(); + + // repeated .FamilyScope scopes = 6; + java.util.List + getScopesList(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index); + int getScopesCount(); + java.util.List + getScopesOrBuilderList(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder( + int index); + + // optional uint32 followingKvCount = 7; + boolean hasFollowingKvCount(); + int getFollowingKvCount(); + } + public static final class WALKey extends + com.google.protobuf.GeneratedMessage + implements WALKeyOrBuilder { + // Use WALKey.newBuilder() to construct. 
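The WALHeader message completed above carries a single optional boolean, hasCompression. A minimal round-trip sketch using only members declared in this file (newBuilder, setHasCompression, build, parseFrom) plus the inherited MessageLite.toByteArray(); the wrapping class and method names are illustrative:

    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader;

    class WALHeaderRoundTrip {
      static boolean demo() throws com.google.protobuf.InvalidProtocolBufferException {
        // Record that the log body was written with compression enabled.
        WALHeader header = WALHeader.newBuilder()
            .setHasCompression(true)
            .build();
        // Re-parse with the generated parseFrom(byte[]) overload shown above.
        WALHeader reread = WALHeader.parseFrom(header.toByteArray());
        return reread.hasHasCompression() && reread.getHasCompression();
      }
    }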
+ private WALKey(Builder builder) { + super(builder); + } + private WALKey(boolean noInit) {} + + private static final WALKey defaultInstance; + public static WALKey getDefaultInstance() { + return defaultInstance; + } + + public WALKey getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_fieldAccessorTable; + } + + private int bitField0_; + // required bytes encodedRegionName = 1; + public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString encodedRegionName_; + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + + // required bytes tableName = 2; + public static final int TABLENAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required uint64 logSequenceNumber = 3; + public static final int LOGSEQUENCENUMBER_FIELD_NUMBER = 3; + private long logSequenceNumber_; + public boolean hasLogSequenceNumber() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getLogSequenceNumber() { + return logSequenceNumber_; + } + + // required uint64 writeTime = 4; + public static final int WRITETIME_FIELD_NUMBER = 4; + private long writeTime_; + public boolean hasWriteTime() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getWriteTime() { + return writeTime_; + } + + // optional .UUID clusterId = 5; + public static final int CLUSTERID_FIELD_NUMBER = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID clusterId_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId() { + return clusterId_; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() { + return clusterId_; + } + + // repeated .FamilyScope scopes = 6; + public static final int SCOPES_FIELD_NUMBER = 6; + private java.util.List scopes_; + public java.util.List getScopesList() { + return scopes_; + } + public java.util.List + getScopesOrBuilderList() { + return scopes_; + } + public int getScopesCount() { + return scopes_.size(); + } + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index) { + return scopes_.get(index); + } + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder( + int index) { + return scopes_.get(index); + } + + // optional uint32 followingKvCount = 7; + public static final int FOLLOWINGKVCOUNT_FIELD_NUMBER = 7; + private int followingKvCount_; + public boolean hasFollowingKvCount() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public int getFollowingKvCount() { + return followingKvCount_; + } + + private void initFields() { + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + tableName_ = com.google.protobuf.ByteString.EMPTY; + 
logSequenceNumber_ = 0L; + writeTime_ = 0L; + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); + scopes_ = java.util.Collections.emptyList(); + followingKvCount_ = 0; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasEncodedRegionName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasLogSequenceNumber()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasWriteTime()) { + memoizedIsInitialized = 0; + return false; + } + if (hasClusterId()) { + if (!getClusterId().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getScopesCount(); i++) { + if (!getScopes(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, encodedRegionName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, logSequenceNumber_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeUInt64(4, writeTime_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeMessage(5, clusterId_); + } + for (int i = 0; i < scopes_.size(); i++) { + output.writeMessage(6, scopes_.get(i)); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + output.writeUInt32(7, followingKvCount_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, encodedRegionName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, tableName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, logSequenceNumber_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(4, writeTime_); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(5, clusterId_); + } + for (int i = 0; i < scopes_.size(); i++) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(6, scopes_.get(i)); + } + if (((bitField0_ & 0x00000020) == 0x00000020)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt32Size(7, followingKvCount_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)) { + return super.equals(obj); + } + 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) obj; + + boolean result = true; + result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); + if (hasEncodedRegionName()) { + result = result && getEncodedRegionName() + .equals(other.getEncodedRegionName()); + } + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber()); + if (hasLogSequenceNumber()) { + result = result && (getLogSequenceNumber() + == other.getLogSequenceNumber()); + } + result = result && (hasWriteTime() == other.hasWriteTime()); + if (hasWriteTime()) { + result = result && (getWriteTime() + == other.getWriteTime()); + } + result = result && (hasClusterId() == other.hasClusterId()); + if (hasClusterId()) { + result = result && getClusterId() + .equals(other.getClusterId()); + } + result = result && getScopesList() + .equals(other.getScopesList()); + result = result && (hasFollowingKvCount() == other.hasFollowingKvCount()); + if (hasFollowingKvCount()) { + result = result && (getFollowingKvCount() + == other.getFollowingKvCount()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasEncodedRegionName()) { + hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getEncodedRegionName().hashCode(); + } + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasLogSequenceNumber()) { + hash = (37 * hash) + LOGSEQUENCENUMBER_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getLogSequenceNumber()); + } + if (hasWriteTime()) { + hash = (37 * hash) + WRITETIME_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getWriteTime()); + } + if (hasClusterId()) { + hash = (37 * hash) + CLUSTERID_FIELD_NUMBER; + hash = (53 * hash) + getClusterId().hashCode(); + } + if (getScopesCount() > 0) { + hash = (37 * hash) + SCOPES_FIELD_NUMBER; + hash = (53 * hash) + getScopesList().hashCode(); + } + if (hasFollowingKvCount()) { + hash = (37 * hash) + FOLLOWINGKVCOUNT_FIELD_NUMBER; + hash = (53 * hash) + getFollowingKvCount(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + 
return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKeyOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_WALKey_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getClusterIdFieldBuilder(); + getScopesFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + 
tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + logSequenceNumber_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + writeTime_ = 0L; + bitField0_ = (bitField0_ & ~0x00000008); + if (clusterIdBuilder_ == null) { + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); + } else { + clusterIdBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + if (scopesBuilder_ == null) { + scopes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + } else { + scopesBuilder_.clear(); + } + followingKvCount_ = 0; + bitField0_ = (bitField0_ & ~0x00000040); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey build() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.encodedRegionName_ = encodedRegionName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.logSequenceNumber_ = logSequenceNumber_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.writeTime_ = writeTime_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + if (clusterIdBuilder_ == null) { + result.clusterId_ = clusterId_; + } else { + result.clusterId_ = clusterIdBuilder_.build(); + } + if (scopesBuilder_ == null) { + if (((bitField0_ & 0x00000020) == 0x00000020)) { + scopes_ = java.util.Collections.unmodifiableList(scopes_); + bitField0_ = (bitField0_ & ~0x00000020); + } + result.scopes_ = scopes_; + } else { + result.scopes_ = scopesBuilder_.build(); + } + if (((from_bitField0_ & 0x00000040) == 0x00000040)) { + to_bitField0_ |= 0x00000020; + } + result.followingKvCount_ = followingKvCount_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey) { + return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this; + if (other.hasEncodedRegionName()) { + setEncodedRegionName(other.getEncodedRegionName()); + } + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasLogSequenceNumber()) { + setLogSequenceNumber(other.getLogSequenceNumber()); + } + if (other.hasWriteTime()) { + setWriteTime(other.getWriteTime()); + } + if (other.hasClusterId()) { + mergeClusterId(other.getClusterId()); + } + if (scopesBuilder_ == null) { + if (!other.scopes_.isEmpty()) { + if (scopes_.isEmpty()) { + scopes_ = other.scopes_; + bitField0_ = (bitField0_ & ~0x00000020); + } else { + ensureScopesIsMutable(); + scopes_.addAll(other.scopes_); + } + onChanged(); + } + } else { + if (!other.scopes_.isEmpty()) { + if (scopesBuilder_.isEmpty()) { + scopesBuilder_.dispose(); + scopesBuilder_ = null; + scopes_ = other.scopes_; + bitField0_ = (bitField0_ & ~0x00000020); + scopesBuilder_ = + com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? + getScopesFieldBuilder() : null; + } else { + scopesBuilder_.addAllMessages(other.scopes_); + } + } + } + if (other.hasFollowingKvCount()) { + setFollowingKvCount(other.getFollowingKvCount()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasEncodedRegionName()) { + + return false; + } + if (!hasTableName()) { + + return false; + } + if (!hasLogSequenceNumber()) { + + return false; + } + if (!hasWriteTime()) { + + return false; + } + if (hasClusterId()) { + if (!getClusterId().isInitialized()) { + + return false; + } + } + for (int i = 0; i < getScopesCount(); i++) { + if (!getScopes(i).isInitialized()) { + + return false; + } + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + encodedRegionName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + tableName_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + logSequenceNumber_ = input.readUInt64(); + break; + } + case 32: { + bitField0_ |= 0x00000008; + writeTime_ = input.readUInt64(); + break; + } + case 42: { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.newBuilder(); + if (hasClusterId()) { + subBuilder.mergeFrom(getClusterId()); + } + input.readMessage(subBuilder, extensionRegistry); + setClusterId(subBuilder.buildPartial()); + break; + } + case 50: { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder subBuilder = 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.newBuilder(); + input.readMessage(subBuilder, extensionRegistry); + addScopes(subBuilder.buildPartial()); + break; + } + case 56: { + bitField0_ |= 0x00000040; + followingKvCount_ = input.readUInt32(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes encodedRegionName = 1; + private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + encodedRegionName_ = value; + onChanged(); + return this; + } + public Builder clearEncodedRegionName() { + bitField0_ = (bitField0_ & ~0x00000001); + encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); + onChanged(); + return this; + } + + // required bytes tableName = 2; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000002); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required uint64 logSequenceNumber = 3; + private long logSequenceNumber_ ; + public boolean hasLogSequenceNumber() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public long getLogSequenceNumber() { + return logSequenceNumber_; + } + public Builder setLogSequenceNumber(long value) { + bitField0_ |= 0x00000004; + logSequenceNumber_ = value; + onChanged(); + return this; + } + public Builder clearLogSequenceNumber() { + bitField0_ = (bitField0_ & ~0x00000004); + logSequenceNumber_ = 0L; + onChanged(); + return this; + } + + // required uint64 writeTime = 4; + private long writeTime_ ; + public boolean hasWriteTime() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public long getWriteTime() { + return writeTime_; + } + public Builder setWriteTime(long value) { + bitField0_ |= 0x00000008; + writeTime_ = value; + onChanged(); + return this; + } + public Builder clearWriteTime() { + bitField0_ = (bitField0_ & ~0x00000008); + writeTime_ = 0L; + onChanged(); + return this; + } + + // optional .UUID clusterId = 5; + private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> clusterIdBuilder_; + public boolean hasClusterId() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID getClusterId() { + if (clusterIdBuilder_ == null) { + return clusterId_; + } else { + return clusterIdBuilder_.getMessage(); + } + } + 
public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) { + if (clusterIdBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + clusterId_ = value; + onChanged(); + } else { + clusterIdBuilder_.setMessage(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder setClusterId( + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder builderForValue) { + if (clusterIdBuilder_ == null) { + clusterId_ = builderForValue.build(); + onChanged(); + } else { + clusterIdBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID value) { + if (clusterIdBuilder_ == null) { + if (((bitField0_ & 0x00000010) == 0x00000010) && + clusterId_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()) { + clusterId_ = + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial(); + } else { + clusterId_ = value; + } + onChanged(); + } else { + clusterIdBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000010; + return this; + } + public Builder clearClusterId() { + if (clusterIdBuilder_ == null) { + clusterId_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.getDefaultInstance(); + onChanged(); + } else { + clusterIdBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder getClusterIdBuilder() { + bitField0_ |= 0x00000010; + onChanged(); + return getClusterIdFieldBuilder().getBuilder(); + } + public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() { + if (clusterIdBuilder_ != null) { + return clusterIdBuilder_.getMessageOrBuilder(); + } else { + return clusterId_; + } + } + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder> + getClusterIdFieldBuilder() { + if (clusterIdBuilder_ == null) { + clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUID.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.UUIDOrBuilder>( + clusterId_, + getParentForChildren(), + isClean()); + clusterId_ = null; + } + return clusterIdBuilder_; + } + + // repeated .FamilyScope scopes = 6; + private java.util.List scopes_ = + java.util.Collections.emptyList(); + private void ensureScopesIsMutable() { + if (!((bitField0_ & 0x00000020) == 0x00000020)) { + scopes_ = new java.util.ArrayList(scopes_); + bitField0_ |= 0x00000020; + } + } + + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> scopesBuilder_; + + public java.util.List getScopesList() { + if (scopesBuilder_ == null) { + return java.util.Collections.unmodifiableList(scopes_); + } else { + return scopesBuilder_.getMessageList(); + } + } + public int getScopesCount() { + if (scopesBuilder_ == null) { + return scopes_.size(); + } else { + return scopesBuilder_.getCount(); 
+ } + } + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getScopes(int index) { + if (scopesBuilder_ == null) { + return scopes_.get(index); + } else { + return scopesBuilder_.getMessage(index); + } + } + public Builder setScopes( + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) { + if (scopesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureScopesIsMutable(); + scopes_.set(index, value); + onChanged(); + } else { + scopesBuilder_.setMessage(index, value); + } + return this; + } + public Builder setScopes( + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) { + if (scopesBuilder_ == null) { + ensureScopesIsMutable(); + scopes_.set(index, builderForValue.build()); + onChanged(); + } else { + scopesBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + public Builder addScopes(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) { + if (scopesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureScopesIsMutable(); + scopes_.add(value); + onChanged(); + } else { + scopesBuilder_.addMessage(value); + } + return this; + } + public Builder addScopes( + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope value) { + if (scopesBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureScopesIsMutable(); + scopes_.add(index, value); + onChanged(); + } else { + scopesBuilder_.addMessage(index, value); + } + return this; + } + public Builder addScopes( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) { + if (scopesBuilder_ == null) { + ensureScopesIsMutable(); + scopes_.add(builderForValue.build()); + onChanged(); + } else { + scopesBuilder_.addMessage(builderForValue.build()); + } + return this; + } + public Builder addScopes( + int index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder builderForValue) { + if (scopesBuilder_ == null) { + ensureScopesIsMutable(); + scopes_.add(index, builderForValue.build()); + onChanged(); + } else { + scopesBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + public Builder addAllScopes( + java.lang.Iterable values) { + if (scopesBuilder_ == null) { + ensureScopesIsMutable(); + super.addAll(values, scopes_); + onChanged(); + } else { + scopesBuilder_.addAllMessages(values); + } + return this; + } + public Builder clearScopes() { + if (scopesBuilder_ == null) { + scopes_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000020); + onChanged(); + } else { + scopesBuilder_.clear(); + } + return this; + } + public Builder removeScopes(int index) { + if (scopesBuilder_ == null) { + ensureScopesIsMutable(); + scopes_.remove(index); + onChanged(); + } else { + scopesBuilder_.remove(index); + } + return this; + } + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder getScopesBuilder( + int index) { + return getScopesFieldBuilder().getBuilder(index); + } + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder( + int index) { + if (scopesBuilder_ == null) { + return scopes_.get(index); } else { + return scopesBuilder_.getMessageOrBuilder(index); + } + } + public java.util.List + getScopesOrBuilderList() { + if (scopesBuilder_ != null) { + return scopesBuilder_.getMessageOrBuilderList(); + } else { + return 
java.util.Collections.unmodifiableList(scopes_); + } + } + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder() { + return getScopesFieldBuilder().addBuilder( + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()); + } + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder addScopesBuilder( + int index) { + return getScopesFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()); + } + public java.util.List + getScopesBuilderList() { + return getScopesFieldBuilder().getBuilderList(); + } + private com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder> + getScopesFieldBuilder() { + if (scopesBuilder_ == null) { + scopesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder, org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder>( + scopes_, + ((bitField0_ & 0x00000020) == 0x00000020), + getParentForChildren(), + isClean()); + scopes_ = null; + } + return scopesBuilder_; + } + + // optional uint32 followingKvCount = 7; + private int followingKvCount_ ; + public boolean hasFollowingKvCount() { + return ((bitField0_ & 0x00000040) == 0x00000040); + } + public int getFollowingKvCount() { + return followingKvCount_; + } + public Builder setFollowingKvCount(int value) { + bitField0_ |= 0x00000040; + followingKvCount_ = value; + onChanged(); + return this; + } + public Builder clearFollowingKvCount() { + bitField0_ = (bitField0_ & ~0x00000040); + followingKvCount_ = 0; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:WALKey) + } + + static { + defaultInstance = new WALKey(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:WALKey) + } + + public interface FamilyScopeOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes family = 1; + boolean hasFamily(); + com.google.protobuf.ByteString getFamily(); + + // required .ScopeType scopeType = 2; + boolean hasScopeType(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType(); + } + public static final class FamilyScope extends + com.google.protobuf.GeneratedMessage + implements FamilyScopeOrBuilder { + // Use FamilyScope.newBuilder() to construct. 
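With the WALKey message now complete and FamilyScope's builder following just below, here is a hedged sketch of assembling a key the way the declared accessors suggest: encodedRegionName, tableName, logSequenceNumber and writeTime are required, while scopes and followingKvCount are optional and clusterId is left unset here. The table, region and family literals are invented placeholders.

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType;
    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;

    class WALKeySketch {
      static WALKey demo() {
        // Per-family replication scope entry (both fields are required).
        FamilyScope scope = FamilyScope.newBuilder()
            .setFamily(ByteString.copyFromUtf8("cf"))               // placeholder family
            .setScopeType(ScopeType.REPLICATION_SCOPE_GLOBAL)
            .build();
        // build() throws if any of the four required WALKey fields is missing.
        return WALKey.newBuilder()
            .setEncodedRegionName(ByteString.copyFromUtf8("0123abcd")) // placeholder encoded region
            .setTableName(ByteString.copyFromUtf8("demo_table"))       // placeholder table
            .setLogSequenceNumber(42L)
            .setWriteTime(System.currentTimeMillis())
            .addScopes(scope)
            .setFollowingKvCount(1)
            .build();
      }
    }

The followingKvCount field presumably tells a reader how many cells follow this key within a single log entry; that interpretation is inferred from the field name only.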
+ private FamilyScope(Builder builder) { + super(builder); + } + private FamilyScope(boolean noInit) {} + + private static final FamilyScope defaultInstance; + public static FamilyScope getDefaultInstance() { + return defaultInstance; + } + + public FamilyScope getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_fieldAccessorTable; + } + + private int bitField0_; + // required bytes family = 1; + public static final int FAMILY_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString family_; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + + // required .ScopeType scopeType = 2; + public static final int SCOPETYPE_FIELD_NUMBER = 2; + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType scopeType_; + public boolean hasScopeType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType() { + return scopeType_; + } + + private void initFields() { + family_ = com.google.protobuf.ByteString.EMPTY; + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasFamily()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasScopeType()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeEnum(2, scopeType_.getNumber()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, family_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeEnumSize(2, scopeType_.getNumber()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) obj; + + boolean result = true; + result = result && 
(hasFamily() == other.hasFamily()); + if (hasFamily()) { + result = result && getFamily() + .equals(other.getFamily()); + } + result = result && (hasScopeType() == other.hasScopeType()); + if (hasScopeType()) { + result = result && + (getScopeType() == other.getScopeType()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasFamily()) { + hash = (37 * hash) + FAMILY_FIELD_NUMBER; + hash = (53 * hash) + getFamily().hashCode(); + } + if (hasScopeType()) { + hash = (37 * hash) + SCOPETYPE_FIELD_NUMBER; + hash = (53 * hash) + hashEnum(getScopeType()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScopeOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_FamilyScope_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + family_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope build() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.family_ = 
family_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.scopeType_ = scopeType_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.getDefaultInstance()) return this; + if (other.hasFamily()) { + setFamily(other.getFamily()); + } + if (other.hasScopeType()) { + setScopeType(other.getScopeType()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasFamily()) { + + return false; + } + if (!hasScopeType()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + family_ = input.readBytes(); + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType value = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + scopeType_ = value; + } + break; + } + } + } + } + + private int bitField0_; + + // required bytes family = 1; + private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamily() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getFamily() { + return family_; + } + public Builder setFamily(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + family_ = value; + onChanged(); + return this; + } + public Builder clearFamily() { + bitField0_ = (bitField0_ & ~0x00000001); + family_ = getDefaultInstance().getFamily(); + onChanged(); + return this; + } + + // required .ScopeType scopeType = 2; + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; + public boolean hasScopeType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType getScopeType() { + return scopeType_; + } + public Builder setScopeType(org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + scopeType_ = value; + onChanged(); + return this; + } + public Builder 
clearScopeType() { + bitField0_ = (bitField0_ & ~0x00000002); + scopeType_ = org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType.REPLICATION_SCOPE_LOCAL; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:FamilyScope) + } + + static { + defaultInstance = new FamilyScope(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:FamilyScope) + } + + public interface CompactionDescriptorOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required bytes tableName = 1; + boolean hasTableName(); + com.google.protobuf.ByteString getTableName(); + + // required bytes encodedRegionName = 2; + boolean hasEncodedRegionName(); + com.google.protobuf.ByteString getEncodedRegionName(); + + // required bytes familyName = 3; + boolean hasFamilyName(); + com.google.protobuf.ByteString getFamilyName(); + + // repeated string compactionInput = 4; + java.util.List getCompactionInputList(); + int getCompactionInputCount(); + String getCompactionInput(int index); + + // repeated string compactionOutput = 5; + java.util.List getCompactionOutputList(); + int getCompactionOutputCount(); + String getCompactionOutput(int index); + + // required string storeHomeDir = 6; + boolean hasStoreHomeDir(); + String getStoreHomeDir(); + } + public static final class CompactionDescriptor extends + com.google.protobuf.GeneratedMessage + implements CompactionDescriptorOrBuilder { + // Use CompactionDescriptor.newBuilder() to construct. + private CompactionDescriptor(Builder builder) { + super(builder); + } + private CompactionDescriptor(boolean noInit) {} + + private static final CompactionDescriptor defaultInstance; + public static CompactionDescriptor getDefaultInstance() { + return defaultInstance; + } + + public CompactionDescriptor getDefaultInstanceForType() { + return defaultInstance; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_fieldAccessorTable; + } + + private int bitField0_; + // required bytes tableName = 1; + public static final int TABLENAME_FIELD_NUMBER = 1; + private com.google.protobuf.ByteString tableName_; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + + // required bytes encodedRegionName = 2; + public static final int ENCODEDREGIONNAME_FIELD_NUMBER = 2; + private com.google.protobuf.ByteString encodedRegionName_; + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + + // required bytes familyName = 3; + public static final int FAMILYNAME_FIELD_NUMBER = 3; + private com.google.protobuf.ByteString familyName_; + public boolean hasFamilyName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getFamilyName() { + return familyName_; + } + + // repeated string compactionInput = 4; + public static final int COMPACTIONINPUT_FIELD_NUMBER = 4; + private com.google.protobuf.LazyStringList compactionInput_; + public java.util.List + getCompactionInputList() { + 
return compactionInput_; + } + public int getCompactionInputCount() { + return compactionInput_.size(); + } + public String getCompactionInput(int index) { + return compactionInput_.get(index); + } + + // repeated string compactionOutput = 5; + public static final int COMPACTIONOUTPUT_FIELD_NUMBER = 5; + private com.google.protobuf.LazyStringList compactionOutput_; + public java.util.List + getCompactionOutputList() { + return compactionOutput_; + } + public int getCompactionOutputCount() { + return compactionOutput_.size(); + } + public String getCompactionOutput(int index) { + return compactionOutput_.get(index); + } + + // required string storeHomeDir = 6; + public static final int STOREHOMEDIR_FIELD_NUMBER = 6; + private java.lang.Object storeHomeDir_; + public boolean hasStoreHomeDir() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + public String getStoreHomeDir() { + java.lang.Object ref = storeHomeDir_; + if (ref instanceof String) { + return (String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + String s = bs.toStringUtf8(); + if (com.google.protobuf.Internal.isValidUtf8(bs)) { + storeHomeDir_ = s; + } + return s; + } + } + private com.google.protobuf.ByteString getStoreHomeDirBytes() { + java.lang.Object ref = storeHomeDir_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8((String) ref); + storeHomeDir_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + tableName_ = com.google.protobuf.ByteString.EMPTY; + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + familyName_ = com.google.protobuf.ByteString.EMPTY; + compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + storeHomeDir_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasTableName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasEncodedRegionName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasFamilyName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasStoreHomeDir()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, tableName_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, encodedRegionName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, familyName_); + } + for (int i = 0; i < compactionInput_.size(); i++) { + output.writeBytes(4, compactionInput_.getByteString(i)); + } + for (int i = 0; i < compactionOutput_.size(); i++) { + output.writeBytes(5, compactionOutput_.getByteString(i)); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(6, getStoreHomeDirBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, tableName_); + } + if 
(((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, encodedRegionName_); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, familyName_); + } + { + int dataSize = 0; + for (int i = 0; i < compactionInput_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(compactionInput_.getByteString(i)); + } + size += dataSize; + size += 1 * getCompactionInputList().size(); + } + { + int dataSize = 0; + for (int i = 0; i < compactionOutput_.size(); i++) { + dataSize += com.google.protobuf.CodedOutputStream + .computeBytesSizeNoTag(compactionOutput_.getByteString(i)); + } + size += dataSize; + size += 1 * getCompactionOutputList().size(); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(6, getStoreHomeDirBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor other = (org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && (hasEncodedRegionName() == other.hasEncodedRegionName()); + if (hasEncodedRegionName()) { + result = result && getEncodedRegionName() + .equals(other.getEncodedRegionName()); + } + result = result && (hasFamilyName() == other.hasFamilyName()); + if (hasFamilyName()) { + result = result && getFamilyName() + .equals(other.getFamilyName()); + } + result = result && getCompactionInputList() + .equals(other.getCompactionInputList()); + result = result && getCompactionOutputList() + .equals(other.getCompactionOutputList()); + result = result && (hasStoreHomeDir() == other.hasStoreHomeDir()); + if (hasStoreHomeDir()) { + result = result && getStoreHomeDir() + .equals(other.getStoreHomeDir()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + @java.lang.Override + public int hashCode() { + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLENAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (hasEncodedRegionName()) { + hash = (37 * hash) + ENCODEDREGIONNAME_FIELD_NUMBER; + hash = (53 * hash) + getEncodedRegionName().hashCode(); + } + if (hasFamilyName()) { + hash = (37 * hash) + FAMILYNAME_FIELD_NUMBER; + hash = (53 * hash) + getFamilyName().hashCode(); + } + if (getCompactionInputCount() > 0) { + hash = (37 * hash) + COMPACTIONINPUT_FIELD_NUMBER; + hash = (53 * hash) + getCompactionInputList().hashCode(); + } + if (getCompactionOutputCount() > 0) { + hash = (37 * hash) + COMPACTIONOUTPUT_FIELD_NUMBER; + hash = (53 * hash) + getCompactionOutputList().hashCode(); + } + if (hasStoreHomeDir()) { 
+ hash = (37 * hash) + STOREHOMEDIR_FIELD_NUMBER; + hash = (53 * hash) + getStoreHomeDir().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return newBuilder().mergeFrom(data, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom(java.io.InputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + Builder builder = newBuilder(); + if (builder.mergeDelimitedFrom(input, extensionRegistry)) { + return builder.buildParsed(); + } else { + return null; + } + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return newBuilder().mergeFrom(input).buildParsed(); + } + public static org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return newBuilder().mergeFrom(input, extensionRegistry) + .buildParsed(); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + 
com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptorOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.internal_static_CompactionDescriptor_fieldAccessorTable; + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder(BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + tableName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000001); + encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000002); + familyName_ = com.google.protobuf.ByteString.EMPTY; + bitField0_ = (bitField0_ & ~0x00000004); + compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000010); + storeHomeDir_ = ""; + bitField0_ = (bitField0_ & ~0x00000020); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.getDescriptor(); + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor build() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + private org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor buildParsed() + throws com.google.protobuf.InvalidProtocolBufferException { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException( + result).asInvalidProtocolBufferException(); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.tableName_ = tableName_; + if (((from_bitField0_ & 0x00000002) 
== 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.encodedRegionName_ = encodedRegionName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.familyName_ = familyName_; + if (((bitField0_ & 0x00000008) == 0x00000008)) { + compactionInput_ = new com.google.protobuf.UnmodifiableLazyStringList( + compactionInput_); + bitField0_ = (bitField0_ & ~0x00000008); + } + result.compactionInput_ = compactionInput_; + if (((bitField0_ & 0x00000010) == 0x00000010)) { + compactionOutput_ = new com.google.protobuf.UnmodifiableLazyStringList( + compactionOutput_); + bitField0_ = (bitField0_ & ~0x00000010); + } + result.compactionOutput_ = compactionOutput_; + if (((from_bitField0_ & 0x00000020) == 0x00000020)) { + to_bitField0_ |= 0x00000008; + } + result.storeHomeDir_ = storeHomeDir_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.getDefaultInstance()) return this; + if (other.hasTableName()) { + setTableName(other.getTableName()); + } + if (other.hasEncodedRegionName()) { + setEncodedRegionName(other.getEncodedRegionName()); + } + if (other.hasFamilyName()) { + setFamilyName(other.getFamilyName()); + } + if (!other.compactionInput_.isEmpty()) { + if (compactionInput_.isEmpty()) { + compactionInput_ = other.compactionInput_; + bitField0_ = (bitField0_ & ~0x00000008); + } else { + ensureCompactionInputIsMutable(); + compactionInput_.addAll(other.compactionInput_); + } + onChanged(); + } + if (!other.compactionOutput_.isEmpty()) { + if (compactionOutput_.isEmpty()) { + compactionOutput_ = other.compactionOutput_; + bitField0_ = (bitField0_ & ~0x00000010); + } else { + ensureCompactionOutputIsMutable(); + compactionOutput_.addAll(other.compactionOutput_); + } + onChanged(); + } + if (other.hasStoreHomeDir()) { + setStoreHomeDir(other.getStoreHomeDir()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasTableName()) { + + return false; + } + if (!hasEncodedRegionName()) { + + return false; + } + if (!hasFamilyName()) { + + return false; + } + if (!hasStoreHomeDir()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder( + this.getUnknownFields()); + while (true) { + int tag = input.readTag(); + switch (tag) { + case 0: + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + this.setUnknownFields(unknownFields.build()); + onChanged(); + return this; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + tableName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + encodedRegionName_ = input.readBytes(); + break; + } + case 
26: { + bitField0_ |= 0x00000004; + familyName_ = input.readBytes(); + break; + } + case 34: { + ensureCompactionInputIsMutable(); + compactionInput_.add(input.readBytes()); + break; + } + case 42: { + ensureCompactionOutputIsMutable(); + compactionOutput_.add(input.readBytes()); + break; + } + case 50: { + bitField0_ |= 0x00000020; + storeHomeDir_ = input.readBytes(); + break; + } + } + } + } + + private int bitField0_; + + // required bytes tableName = 1; + private com.google.protobuf.ByteString tableName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + public com.google.protobuf.ByteString getTableName() { + return tableName_; + } + public Builder setTableName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + tableName_ = value; + onChanged(); + return this; + } + public Builder clearTableName() { + bitField0_ = (bitField0_ & ~0x00000001); + tableName_ = getDefaultInstance().getTableName(); + onChanged(); + return this; + } + + // required bytes encodedRegionName = 2; + private com.google.protobuf.ByteString encodedRegionName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasEncodedRegionName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + public com.google.protobuf.ByteString getEncodedRegionName() { + return encodedRegionName_; + } + public Builder setEncodedRegionName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + encodedRegionName_ = value; + onChanged(); + return this; + } + public Builder clearEncodedRegionName() { + bitField0_ = (bitField0_ & ~0x00000002); + encodedRegionName_ = getDefaultInstance().getEncodedRegionName(); + onChanged(); + return this; + } + + // required bytes familyName = 3; + private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY; + public boolean hasFamilyName() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + public com.google.protobuf.ByteString getFamilyName() { + return familyName_; + } + public Builder setFamilyName(com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + familyName_ = value; + onChanged(); + return this; + } + public Builder clearFamilyName() { + bitField0_ = (bitField0_ & ~0x00000004); + familyName_ = getDefaultInstance().getFamilyName(); + onChanged(); + return this; + } + + // repeated string compactionInput = 4; + private com.google.protobuf.LazyStringList compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureCompactionInputIsMutable() { + if (!((bitField0_ & 0x00000008) == 0x00000008)) { + compactionInput_ = new com.google.protobuf.LazyStringArrayList(compactionInput_); + bitField0_ |= 0x00000008; + } + } + public java.util.List + getCompactionInputList() { + return java.util.Collections.unmodifiableList(compactionInput_); + } + public int getCompactionInputCount() { + return compactionInput_.size(); + } + public String getCompactionInput(int index) { + return compactionInput_.get(index); + } + public Builder setCompactionInput( + int index, String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureCompactionInputIsMutable(); + compactionInput_.set(index, value); + onChanged(); + return this; + } + public Builder addCompactionInput(String value) { + if (value == null) { + throw 
new NullPointerException(); + } + ensureCompactionInputIsMutable(); + compactionInput_.add(value); + onChanged(); + return this; + } + public Builder addAllCompactionInput( + java.lang.Iterable values) { + ensureCompactionInputIsMutable(); + super.addAll(values, compactionInput_); + onChanged(); + return this; + } + public Builder clearCompactionInput() { + compactionInput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000008); + onChanged(); + return this; + } + void addCompactionInput(com.google.protobuf.ByteString value) { + ensureCompactionInputIsMutable(); + compactionInput_.add(value); + onChanged(); + } + + // repeated string compactionOutput = 5; + private com.google.protobuf.LazyStringList compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + private void ensureCompactionOutputIsMutable() { + if (!((bitField0_ & 0x00000010) == 0x00000010)) { + compactionOutput_ = new com.google.protobuf.LazyStringArrayList(compactionOutput_); + bitField0_ |= 0x00000010; + } + } + public java.util.List + getCompactionOutputList() { + return java.util.Collections.unmodifiableList(compactionOutput_); + } + public int getCompactionOutputCount() { + return compactionOutput_.size(); + } + public String getCompactionOutput(int index) { + return compactionOutput_.get(index); + } + public Builder setCompactionOutput( + int index, String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureCompactionOutputIsMutable(); + compactionOutput_.set(index, value); + onChanged(); + return this; + } + public Builder addCompactionOutput(String value) { + if (value == null) { + throw new NullPointerException(); + } + ensureCompactionOutputIsMutable(); + compactionOutput_.add(value); + onChanged(); + return this; + } + public Builder addAllCompactionOutput( + java.lang.Iterable values) { + ensureCompactionOutputIsMutable(); + super.addAll(values, compactionOutput_); + onChanged(); + return this; + } + public Builder clearCompactionOutput() { + compactionOutput_ = com.google.protobuf.LazyStringArrayList.EMPTY; + bitField0_ = (bitField0_ & ~0x00000010); + onChanged(); + return this; + } + void addCompactionOutput(com.google.protobuf.ByteString value) { + ensureCompactionOutputIsMutable(); + compactionOutput_.add(value); + onChanged(); + } + + // required string storeHomeDir = 6; + private java.lang.Object storeHomeDir_ = ""; + public boolean hasStoreHomeDir() { + return ((bitField0_ & 0x00000020) == 0x00000020); + } + public String getStoreHomeDir() { + java.lang.Object ref = storeHomeDir_; + if (!(ref instanceof String)) { + String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); + storeHomeDir_ = s; + return s; + } else { + return (String) ref; + } + } + public Builder setStoreHomeDir(String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000020; + storeHomeDir_ = value; + onChanged(); + return this; + } + public Builder clearStoreHomeDir() { + bitField0_ = (bitField0_ & ~0x00000020); + storeHomeDir_ = getDefaultInstance().getStoreHomeDir(); + onChanged(); + return this; + } + void setStoreHomeDir(com.google.protobuf.ByteString value) { + bitField0_ |= 0x00000020; + storeHomeDir_ = value; + onChanged(); + } + + // @@protoc_insertion_point(builder_scope:CompactionDescriptor) + } + + static { + defaultInstance = new CompactionDescriptor(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:CompactionDescriptor) + } + + private static 
com.google.protobuf.Descriptors.Descriptor + internal_static_WALHeader_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALHeader_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_WALKey_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_WALKey_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_FamilyScope_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_FamilyScope_fieldAccessorTable; + private static com.google.protobuf.Descriptors.Descriptor + internal_static_CompactionDescriptor_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_CompactionDescriptor_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\tWAL.proto\032\013hbase.proto\"#\n\tWALHeader\022\026\n" + + "\016hasCompression\030\001 \001(\010\"\266\001\n\006WALKey\022\031\n\021enco" + + "dedRegionName\030\001 \002(\014\022\021\n\ttableName\030\002 \002(\014\022\031" + + "\n\021logSequenceNumber\030\003 \002(\004\022\021\n\twriteTime\030\004" + + " \002(\004\022\030\n\tclusterId\030\005 \001(\0132\005.UUID\022\034\n\006scopes" + + "\030\006 \003(\0132\014.FamilyScope\022\030\n\020followingKvCount" + + "\030\007 \001(\r\"<\n\013FamilyScope\022\016\n\006family\030\001 \002(\014\022\035\n" + + "\tscopeType\030\002 \002(\0162\n.ScopeType\"\241\001\n\024Compact" + + "ionDescriptor\022\021\n\ttableName\030\001 \002(\014\022\031\n\021enco" + + "dedRegionName\030\002 \002(\014\022\022\n\nfamilyName\030\003 \002(\014\022", + "\027\n\017compactionInput\030\004 \003(\t\022\030\n\020compactionOu" + + "tput\030\005 \003(\t\022\024\n\014storeHomeDir\030\006 \002(\t*F\n\tScop" + + "eType\022\033\n\027REPLICATION_SCOPE_LOCAL\020\000\022\034\n\030RE" + + "PLICATION_SCOPE_GLOBAL\020\001B?\n*org.apache.h" + + "adoop.hbase.protobuf.generatedB\tWALProto" + + "sH\001\210\001\000\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_WALHeader_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_WALHeader_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALHeader_descriptor, + new java.lang.String[] { "HasCompression", }, + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.class, + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader.Builder.class); + internal_static_WALKey_descriptor = + getDescriptor().getMessageTypes().get(1); + internal_static_WALKey_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_WALKey_descriptor, + new java.lang.String[] { "EncodedRegionName", "TableName", "LogSequenceNumber", "WriteTime", "ClusterId", "Scopes", "FollowingKvCount", }, + org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.class, + 
org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder.class); + internal_static_FamilyScope_descriptor = + getDescriptor().getMessageTypes().get(2); + internal_static_FamilyScope_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_FamilyScope_descriptor, + new java.lang.String[] { "Family", "ScopeType", }, + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.class, + org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope.Builder.class); + internal_static_CompactionDescriptor_descriptor = + getDescriptor().getMessageTypes().get(3); + internal_static_CompactionDescriptor_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_CompactionDescriptor_descriptor, + new java.lang.String[] { "TableName", "EncodedRegionName", "FamilyName", "CompactionInput", "CompactionOutput", "StoreHomeDir", }, + org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.class, + org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor.Builder.class); + return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hbase-protocol/src/main/protobuf/Admin.proto b/hbase-protocol/src/main/protobuf/Admin.proto index b56ea1df3a4..6ef0ace7e08 100644 --- a/hbase-protocol/src/main/protobuf/Admin.proto +++ b/hbase-protocol/src/main/protobuf/Admin.proto @@ -25,6 +25,7 @@ option java_generate_equals_and_hash = true; option optimize_for = SPEED; import "hbase.proto"; +import "WAL.proto"; message GetRegionInfoRequest { required RegionSpecifier region = 1; @@ -155,39 +156,10 @@ message MergeRegionsRequest { message MergeRegionsResponse { } -message UUID { - required uint64 leastSigBits = 1; - required uint64 mostSigBits = 2; -} - -// Protocol buffer version of HLog +// Protocol buffer version of WAL for replication message WALEntry { required WALKey key = 1; - required WALEdit edit = 2; - - // Protocol buffer version of HLogKey - message WALKey { - required bytes encodedRegionName = 1; - required bytes tableName = 2; - required uint64 logSequenceNumber = 3; - required uint64 writeTime = 4; - optional UUID clusterId = 5; - } - - message WALEdit { - repeated bytes keyValueBytes = 1; - repeated FamilyScope familyScope = 2; - - enum ScopeType { - REPLICATION_SCOPE_LOCAL = 0; - REPLICATION_SCOPE_GLOBAL = 1; - } - - message FamilyScope { - required bytes family = 1; - required ScopeType scopeType = 2; - } - } + repeated bytes keyValueBytes = 2; } /** diff --git a/hbase-protocol/src/main/protobuf/WAL.proto b/hbase-protocol/src/main/protobuf/WAL.proto index 59b303ef204..43cff5943f1 100644 --- a/hbase-protocol/src/main/protobuf/WAL.proto +++ b/hbase-protocol/src/main/protobuf/WAL.proto @@ -16,14 +16,48 @@ * limitations under the License. 
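// ----------------------------------------------------------------------------------------
// Editorial sketch, not part of the patch: a minimal, self-contained use of the WALProtos
// messages whose generated code appears above (and whose schema follows in WAL.proto below).
// Only builder/parse methods visible in this patch are used; the family, table, region and
// store-path values are made-up placeholders.
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope;
import org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType;

public class WALProtosSketch {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    // Both FamilyScope fields are required; build() fails fast if either is missing.
    FamilyScope scope = FamilyScope.newBuilder()
        .setFamily(ByteString.copyFromUtf8("cf"))
        .setScopeType(ScopeType.REPLICATION_SCOPE_GLOBAL)
        .build();

    // CompactionDescriptor is the marker written to the WAL around a compaction,
    // as the HRegion/HStore hunks later in this patch consume it.
    CompactionDescriptor cd = CompactionDescriptor.newBuilder()
        .setTableName(ByteString.copyFromUtf8("usertable"))
        .setEncodedRegionName(ByteString.copyFromUtf8("e6f1c2a0"))
        .setFamilyName(scope.getFamily())
        .addCompactionInput("hfile-1")
        .addCompactionInput("hfile-2")
        .addCompactionOutput("hfile-merged")
        .setStoreHomeDir("/hbase/usertable/e6f1c2a0/cf")
        .build();

    // Round-trip through the wire format via the generated parseFrom(...) overloads above.
    CompactionDescriptor back = CompactionDescriptor.parseFrom(cd.toByteArray());
    System.out.println(back.getStoreHomeDir() + " inputs=" + back.getCompactionInputCount());
  }
}
// ----------------------------------------------------------------------------------------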
 */
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
-option java_outer_classname = "WAL";
+option java_outer_classname = "WALProtos";
+option java_generic_services = false;
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;
import "hbase.proto";
+message WALHeader {
+  optional bool hasCompression = 1;
+}
+
+// Protocol buffer version of HLogKey; see HLogKey comment, not really a key but WALEdit header for some KVs
+message WALKey {
+  required bytes encodedRegionName = 1;
+  required bytes tableName = 2;
+  required uint64 logSequenceNumber = 3;
+  required uint64 writeTime = 4;
+  optional UUID clusterId = 5;
+
+  repeated FamilyScope scopes = 6;
+  optional uint32 followingKvCount = 7;
+/*
+  optional CustomEntryType customEntryType = 8;
+
+  enum CustomEntryType {
+    COMPACTION = 0;
+  }
+*/
+}
+
+enum ScopeType {
+  REPLICATION_SCOPE_LOCAL = 0;
+  REPLICATION_SCOPE_GLOBAL = 1;
+}
+
+message FamilyScope {
+  required bytes family = 1;
+  required ScopeType scopeType = 2;
+}
+
 /**
- * WAL entries
+ * Custom WAL entries
 */
 /**
@@ -33,7 +67,7 @@ import "hbase.proto";
  * the
  * compaction should we fail the WAL write.
  */
 message CompactionDescriptor {
-  required bytes tableName = 1;
+  required bytes tableName = 1; // TODO: WALKey already stores these, might remove
   required bytes encodedRegionName = 2;
   required bytes familyName = 3;
   repeated string compactionInput = 4;
diff --git a/hbase-protocol/src/main/protobuf/hbase.proto b/hbase-protocol/src/main/protobuf/hbase.proto
index cd04872efb1..b43ad267866 100644
--- a/hbase-protocol/src/main/protobuf/hbase.proto
+++ b/hbase-protocol/src/main/protobuf/hbase.proto
@@ -301,3 +301,8 @@ message LongMsg {
 message BigDecimalMsg {
   required bytes bigdecimalMsg = 1;
 }
+
+message UUID {
+  required uint64 leastSigBits = 1;
+  required uint64 mostSigBits = 2;
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
index 820e70b5ddc..ccc61ac427f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ReplicationProtbufUtil.java
@@ -26,6 +26,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.AdminProtocol;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
@@ -47,32 +49,23 @@ public class ReplicationProtbufUtil {
    * @return an array of HLog entries
    */
   public static HLog.Entry[]
-      toHLogEntries(final List protoList) {
+      toHLogEntries(final List protoList) throws IOException {
     List entries = new ArrayList();
     for (AdminProtos.WALEntry entry: protoList) {
-      AdminProtos.WALEntry.WALKey walKey = entry.getKey();
-      java.util.UUID clusterId = HConstants.DEFAULT_CLUSTER_ID;
-      if (walKey.hasClusterId()) {
-        AdminProtos.UUID protoUuid = walKey.getClusterId();
-        clusterId = new java.util.UUID(
-          protoUuid.getMostSigBits(), protoUuid.getLeastSigBits());
-      }
-      HLogKey key = new HLogKey(walKey.getEncodedRegionName().toByteArray(),
-        walKey.getTableName().toByteArray(), walKey.getLogSequenceNumber(),
-        walKey.getWriteTime(),
clusterId); - AdminProtos.WALEntry.WALEdit walEdit = entry.getEdit(); + WALProtos.WALKey walKey = entry.getKey(); + HLogKey key = new HLogKey(walKey); WALEdit edit = new WALEdit(); - for (ByteString keyValue: walEdit.getKeyValueBytesList()) { + for (ByteString keyValue: entry.getKeyValueBytesList()) { edit.add(new KeyValue(keyValue.toByteArray())); } - if (walEdit.getFamilyScopeCount() > 0) { + if (walKey.getScopesCount() > 0) { TreeMap scopes = new TreeMap(Bytes.BYTES_COMPARATOR); - for (AdminProtos.WALEntry.WALEdit.FamilyScope scope: walEdit.getFamilyScopeList()) { + for (WALProtos.FamilyScope scope: walKey.getScopesList()) { scopes.put(scope.getFamily().toByteArray(), Integer.valueOf(scope.getScopeType().ordinal())); } - edit.setScopes(scopes); + key.setScopes(scopes); } entries.add(new HLog.Entry(key, edit)); } @@ -105,16 +98,13 @@ public class ReplicationProtbufUtil { */ public static AdminProtos.ReplicateWALEntryRequest buildReplicateWALEntryRequest(final HLog.Entry[] entries) { - AdminProtos.WALEntry.WALEdit.FamilyScope.Builder scopeBuilder = AdminProtos.WALEntry - .WALEdit - .FamilyScope - .newBuilder(); + WALProtos.FamilyScope.Builder scopeBuilder = WALProtos.FamilyScope.newBuilder(); AdminProtos.WALEntry.Builder entryBuilder = AdminProtos.WALEntry.newBuilder(); AdminProtos.ReplicateWALEntryRequest.Builder builder = AdminProtos.ReplicateWALEntryRequest.newBuilder(); for (HLog.Entry entry: entries) { entryBuilder.clear(); - AdminProtos.WALEntry.WALKey.Builder keyBuilder = entryBuilder.getKeyBuilder(); + WALProtos.WALKey.Builder keyBuilder = entryBuilder.getKeyBuilder(); HLogKey key = entry.getKey(); keyBuilder.setEncodedRegionName( ByteString.copyFrom(key.getEncodedRegionName())); @@ -123,28 +113,24 @@ public class ReplicationProtbufUtil { keyBuilder.setWriteTime(key.getWriteTime()); UUID clusterId = key.getClusterId(); if (clusterId != null) { - AdminProtos.UUID.Builder uuidBuilder = keyBuilder.getClusterIdBuilder(); + HBaseProtos.UUID.Builder uuidBuilder = keyBuilder.getClusterIdBuilder(); uuidBuilder.setLeastSigBits(clusterId.getLeastSignificantBits()); uuidBuilder.setMostSigBits(clusterId.getMostSignificantBits()); } WALEdit edit = entry.getEdit(); - AdminProtos.WALEntry.WALEdit.Builder editBuilder = entryBuilder.getEditBuilder(); - NavigableMap scopes = edit.getScopes(); + NavigableMap scopes = key.getScopes(); if (scopes != null && !scopes.isEmpty()) { for (Map.Entry scope: scopes.entrySet()) { scopeBuilder.setFamily(ByteString.copyFrom(scope.getKey())); - AdminProtos.WALEntry.WALEdit.ScopeType - scopeType = AdminProtos.WALEntry - .WALEdit - .ScopeType - .valueOf(scope.getValue().intValue()); + WALProtos.ScopeType scopeType = + WALProtos.ScopeType.valueOf(scope.getValue().intValue()); scopeBuilder.setScopeType(scopeType); - editBuilder.addFamilyScope(scopeBuilder.build()); + keyBuilder.addScopes(scopeBuilder.build()); } } List keyValues = edit.getKeyValues(); for (KeyValue value: keyValues) { - editBuilder.addKeyValueBytes(ByteString.copyFrom( + entryBuilder.addKeyValueBytes(ByteString.copyFrom( value.getBuffer(), value.getOffset(), value.getLength())); } builder.addEntry(entryBuilder.build()); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java index bd9c73b0c40..bc1ef09d2df 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -115,7 
+115,7 @@ import org.apache.hadoop.hbase.monitoring.TaskMonitor; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState; import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall; import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription; -import org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.regionserver.MultiVersionConsistencyControl.WriteEntry; import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext; import org.apache.hadoop.hbase.regionserver.wal.HLog; @@ -2890,7 +2890,7 @@ public class HRegion implements HeapSize { // , Writable{ skippedEdits++; continue; - } + } // Figure which store the edit is meant for. if (store == null || !kv.matchingFamily(store.getFamily().getName())) { store = this.stores.get(kv.getFamily()); @@ -2972,7 +2972,8 @@ public class HRegion implements HeapSize { // , Writable{ throws IOException { Store store = this.getStore(compaction.getFamilyName().toByteArray()); if (store == null) { - LOG.warn("Found Compaction WAL edit for deleted family:" + Bytes.toString(compaction.getFamilyName().toByteArray())); + LOG.warn("Found Compaction WAL edit for deleted family:" + + Bytes.toString(compaction.getFamilyName().toByteArray())); return; } store.completeCompactionMarker(compaction); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java index 907534bf299..36db9226162 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -63,7 +63,7 @@ import org.apache.hadoop.hbase.io.hfile.HFileScanner; import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder; import org.apache.hadoop.hbase.monitoring.MonitoredTask; import org.apache.hadoop.hbase.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext; import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress; import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java index f053398de5d..1f2017c5b6f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/Store.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.io.HeapSize; import org.apache.hadoop.hbase.io.compress.Compression; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder; -import org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext; import org.apache.hadoop.hbase.regionserver.compactions.CompactionProgress; import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java index a7b301d3716..f83217a3da3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/Compressor.java @@ -25,6 +25,7 @@ import java.io.IOException; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.regionserver.wal.HLog; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.WritableUtils; @@ -32,6 +33,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import com.google.common.base.Preconditions; +import com.google.protobuf.ByteString; /** * A set of static functions for running our custom WAL compression/decompression. @@ -63,26 +65,31 @@ public class Compressor { private static void transformFile(Path input, Path output) throws IOException { - SequenceFileLogReader in = new SequenceFileLogReader(); - SequenceFileLogWriter out = new SequenceFileLogWriter(); + Configuration conf = HBaseConfiguration.create(); + + FileSystem inFS = input.getFileSystem(conf); + FileSystem outFS = output.getFileSystem(conf); + + HLog.Reader in = HLogFactory.createReader(inFS, input, conf, null, false); + HLog.Writer out = null; try { - Configuration conf = HBaseConfiguration.create(); - - FileSystem inFS = input.getFileSystem(conf); - FileSystem outFS = output.getFileSystem(conf); - - in.init(inFS, input, conf); - boolean compress = in.reader.isWALCompressionEnabled(); - + if (!(in instanceof ReaderBase)) { + System.err.println("Cannot proceed, invalid reader type: " + in.getClass().getName()); + return; + } + boolean compress = ((ReaderBase)in).hasCompression(); conf.setBoolean(HConstants.ENABLE_WAL_COMPRESSION, !compress); - out.init(outFS, output, conf); + out = HLogFactory.createWriter(outFS, output, conf); HLog.Entry e = null; while ((e = in.next()) != null) out.append(e); } finally { in.close(); - out.close(); + if (out != null) { + out.close(); + out = null; + } } } @@ -93,6 +100,7 @@ public class Compressor { * @param dict the dictionary we use for our read. * @return the uncompressed array. */ + @Deprecated static byte[] readCompressed(DataInput in, Dictionary dict) throws IOException { byte status = in.readByte(); @@ -129,6 +137,7 @@ public class Compressor { * * @return the length of the uncompressed data */ + @Deprecated static int uncompressIntoArray(byte[] to, int offset, DataInput in, Dictionary dict) throws IOException { byte status = in.readByte(); @@ -167,6 +176,7 @@ public class Compressor { * @param out the DataOutput to write into * @param dict the dictionary to use for compression */ + @Deprecated static void writeCompressed(byte[] data, int offset, int length, DataOutput out, Dictionary dict) throws IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java index ed36122cf3b..00e2590f624 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/FSHLog.java @@ -527,12 +527,10 @@ class FSHLog implements HLog, Syncable { } } FSHLog.Writer nextWriter = this.createWriterInstance(fs, newPath, conf); - // Can we get at the dfsclient outputstream? 
If an instance of - // SFLW, it'll have done the necessary reflection to get at the - // protected field name. + // Can we get at the dfsclient outputstream? FSDataOutputStream nextHdfsOut = null; - if (nextWriter instanceof SequenceFileLogWriter) { - nextHdfsOut = ((SequenceFileLogWriter)nextWriter).getWriterFSDataOutputStream(); + if (nextWriter instanceof ProtobufLogWriter) { + nextHdfsOut = ((ProtobufLogWriter)nextWriter).getStream(); } Path oldFile = null; @@ -853,43 +851,6 @@ class FSHLog implements HLog, Syncable { return new HLogKey(encodedRegionName, tableName, seqnum, now, clusterId); } - @Override - public long append(HRegionInfo regionInfo, HLogKey logKey, WALEdit logEdit, - HTableDescriptor htd, boolean doSync) - throws IOException { - if (this.closed) { - throw new IOException("Cannot append; log is closed"); - } - long txid = 0; - synchronized (updateLock) { - long seqNum = obtainSeqNum(); - logKey.setLogSeqNum(seqNum); - // The 'lastSeqWritten' map holds the sequence number of the oldest - // write for each region (i.e. the first edit added to the particular - // memstore). When the cache is flushed, the entry for the - // region being flushed is removed if the sequence number of the flush - // is greater than or equal to the value in lastSeqWritten. - this.oldestUnflushedSeqNums.putIfAbsent(regionInfo.getEncodedNameAsBytes(), - Long.valueOf(seqNum)); - doWrite(regionInfo, logKey, logEdit, htd); - txid = this.unflushedEntries.incrementAndGet(); - this.numEntries.incrementAndGet(); - if (htd.isDeferredLogFlush()) { - lastDeferredTxid = txid; - } - } - - // Sync if catalog region, and if not then check if that table supports - // deferred log flushing - if (doSync && - (regionInfo.isMetaRegion() || - !htd.isDeferredLogFlush())) { - // sync txn to file system - this.sync(txid); - } - return txid; - } - @Override public void append(HRegionInfo info, byte [] tableName, WALEdit edits, final long now, HTableDescriptor htd) @@ -1456,4 +1417,4 @@ class FSHLog implements HLog, Syncable { System.exit(-1); } } -} \ No newline at end of file +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java index bba8b32c024..42ba3cb6948 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java @@ -29,6 +29,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HRegionInfo; @@ -51,7 +52,14 @@ public interface HLog { public static final String RECOVERED_LOG_TMPFILE_SUFFIX = ".temp"; public interface Reader { - void init(FileSystem fs, Path path, Configuration c) throws IOException; + + /** + * @param fs File system. + * @param path Path. + * @param c Config. + * @param s Input stream that may have been pre-opened by the caller; may be null. + */ + void init(FileSystem fs, Path path, Configuration c, FSDataInputStream s) throws IOException; void close() throws IOException; @@ -234,20 +242,6 @@ public interface HLog { */ public void closeAndDelete() throws IOException; - /** - * Append an entry to the log. 
- * - * @param regionInfo - * @param logEdit - * @param logKey - * @param doSync - * shall we sync after writing the transaction - * @return The txid of this transaction - * @throws IOException - */ - public long append(HRegionInfo regionInfo, HLogKey logKey, WALEdit logEdit, - HTableDescriptor htd, boolean doSync) throws IOException; - /** * Only used in tests. * diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java index a91284ab8d0..83db320d4c7 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogFactory.java @@ -21,23 +21,26 @@ package org.apache.hadoop.hbase.regionserver.wal; import java.io.IOException; +import java.util.Arrays; import java.io.InterruptedIOException; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.regionserver.wal.HLog.Reader; import org.apache.hadoop.hbase.regionserver.wal.HLog.Writer; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CancelableProgressable; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; public class HLogFactory { private static final Log LOG = LogFactory.getLog(HLogFactory.class); - + public static HLog createHLog(final FileSystem fs, final Path root, final String logName, final Configuration conf) throws IOException { return new FSHLog(fs, root, logName, conf); @@ -60,13 +63,12 @@ public class HLogFactory { return new FSHLog(fs, root, logName, HConstants.HREGION_OLDLOGDIR_NAME, conf, listeners, false, prefix, true); } - + /* * WAL Reader */ - private static Class logReaderClass; - + static void resetLogReaderClass() { logReaderClass = null; } @@ -85,10 +87,17 @@ public class HLogFactory { */ public static HLog.Reader createReader(final FileSystem fs, final Path path, Configuration conf, CancelableProgressable reporter) throws IOException { - if (logReaderClass == null) { + return createReader(fs, path, conf, reporter, true); + } + + public static HLog.Reader createReader(final FileSystem fs, final Path path, + Configuration conf, CancelableProgressable reporter, boolean allowCustom) + throws IOException { + if (allowCustom && (logReaderClass == null)) { logReaderClass = conf.getClass("hbase.regionserver.hlog.reader.impl", - SequenceFileLogReader.class, Reader.class); + ProtobufLogReader.class, Reader.class); } + Class lrClass = allowCustom ? logReaderClass : ProtobufLogReader.class; try { // A hlog file could be under recovery, so it may take several @@ -99,9 +108,25 @@ public class HLogFactory { int nbAttempt = 0; while (true) { try { - HLog.Reader reader = logReaderClass.newInstance(); - reader.init(fs, path, conf); - return reader; + if (lrClass != ProtobufLogReader.class) { + // User is overriding the WAL reader, let them. + HLog.Reader reader = lrClass.newInstance(); + reader.init(fs, path, conf, null); + return reader; + } else { + FSDataInputStream stream = fs.open(path); + // Note that zero-length file will fail to read PB magic, and attempt to create + // a non-PB reader and fail the same way existing code expects it to. 
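// ----------------------------------------------------------------------------------------
// Editorial sketch, not part of the patch: how a caller sees the reader selection in the
// HLogFactory hunk below. createReader probes ProtobufLogReader.PB_WAL_MAGIC and falls back
// to SequenceFileLogReader for pre-protobuf files, so WAL-consuming code is unchanged.
// The path comes from the command line; only entry/key accessors this patch touches are used.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;

public class WalDumpSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    Path walPath = new Path(args[0]);   // a WAL file, either protobuf-based or legacy SequenceFile
    FileSystem fs = walPath.getFileSystem(conf);
    HLog.Reader reader = HLogFactory.createReader(fs, walPath, conf, null);
    try {
      HLog.Entry entry;
      while ((entry = reader.next()) != null) {
        System.out.println("seq=" + entry.getKey().getLogSeqNum()
            + " time=" + entry.getKey().getWriteTime()
            + " kvs=" + entry.getEdit().getKeyValues().size());
      }
    } finally {
      reader.close();
    }
  }
}
// ----------------------------------------------------------------------------------------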
If we get + // rid of the old reader entirely, we need to handle 0-size files differently from + // merely non-PB files. + byte[] magic = new byte[ProtobufLogReader.PB_WAL_MAGIC.length]; + boolean isPbWal = (stream.read(magic) == magic.length) + && Arrays.equals(magic, ProtobufLogReader.PB_WAL_MAGIC); + HLog.Reader reader = + isPbWal ? new ProtobufLogReader() : new SequenceFileLogReader(); + reader.init(fs, path, conf, stream); + return reader; + } } catch (IOException e) { String msg = e.getMessage(); if (msg != null && msg.contains("Cannot obtain block length")) { @@ -139,9 +164,8 @@ public class HLogFactory { /* * WAL writer */ - private static Class logWriterClass; - + /** * Create a writer for the WAL. * @return A WAL writer. Close when done with it. @@ -153,9 +177,9 @@ public class HLogFactory { try { if (logWriterClass == null) { logWriterClass = conf.getClass("hbase.regionserver.hlog.writer.impl", - SequenceFileLogWriter.class, Writer.class); + ProtobufLogWriter.class, Writer.class); } - HLog.Writer writer = (HLog.Writer) logWriterClass.newInstance(); + HLog.Writer writer = (HLog.Writer)logWriterClass.newInstance(); writer.init(fs, path, conf); return writer; } catch (Exception e) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java index ac90a539cb9..0028abebc3f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogKey.java @@ -22,16 +22,27 @@ import java.io.DataInput; import java.io.DataOutput; import java.io.EOFException; import java.io.IOException; +import java.nio.ByteBuffer; import java.util.HashMap; import java.util.Map; +import java.util.NavigableMap; +import java.util.TreeMap; import java.util.UUID; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.FamilyScope; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.ScopeType; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.WritableUtils; +import com.google.protobuf.ByteString; + /** * A Key for an entry in the change log. * @@ -42,8 +53,12 @@ import org.apache.hadoop.io.WritableUtils; *

Some Transactional edits (START, COMMIT, ABORT) will not have an * associated row. */ +// TODO: Key and WALEdit are never used separately, or in one-to-many relation, for practical +// purposes. They need to be merged into HLogEntry. @InterfaceAudience.Private public class HLogKey implements WritableComparable { + public static final Log LOG = LogFactory.getLog(HLogKey.class); + // should be < 0 (@see #readFields(DataInput)) // version 2 supports HLog compression enum Version { @@ -89,16 +104,17 @@ public class HLogKey implements WritableComparable { private UUID clusterId; + private NavigableMap scopes; + private CompressionContext compressionContext; - /** Writable Constructor -- Do not use. */ public HLogKey() { this(null, null, 0L, HConstants.LATEST_TIMESTAMP, HConstants.DEFAULT_CLUSTER_ID); } /** - * Create the log key! + * Create the log key for writing to somewhere. * We maintain the tablename mainly for debugging purposes. * A regionName is always a sub-table object. * @@ -111,11 +127,19 @@ public class HLogKey implements WritableComparable { */ public HLogKey(final byte [] encodedRegionName, final byte [] tablename, long logSeqNum, final long now, UUID clusterId) { - this.encodedRegionName = encodedRegionName; - this.tablename = tablename; this.logSeqNum = logSeqNum; this.writeTime = now; this.clusterId = clusterId; + this.encodedRegionName = encodedRegionName; + this.tablename = tablename; + } + + /** + * Create HLogKey wrapper around protobuf WAL key; takes care of compression. + * @throws IOException Never, as the compression is not enabled. + */ + public HLogKey(WALKey walKey) throws IOException { + readFieldsFromPb(walKey, null); } /** @@ -137,11 +161,7 @@ public class HLogKey implements WritableComparable { /** @return log sequence number */ public long getLogSeqNum() { - return logSeqNum; - } - - void setLogSeqNum(long logSeqNum) { - this.logSeqNum = logSeqNum; + return this.logSeqNum; } /** @@ -159,8 +179,16 @@ public class HLogKey implements WritableComparable { return clusterId; } + public NavigableMap getScopes() { + return scopes; + } + + public void setScopes(NavigableMap scopes) { + this.scopes = scopes; + } + /** - * Set the cluster id of this key + * Set the cluster id of this key. * @param clusterId */ public void setClusterId(UUID clusterId) { @@ -213,7 +241,7 @@ public class HLogKey implements WritableComparable { if (result == 0) { if (this.logSeqNum < o.logSeqNum) { result = -1; - } else if (this.logSeqNum > o.logSeqNum) { + } else if (this.logSeqNum > o.logSeqNum ) { result = 1; } if (result == 0) { @@ -255,7 +283,9 @@ public class HLogKey implements WritableComparable { } @Override + @Deprecated public void write(DataOutput out) throws IOException { + LOG.warn("HLogKey is being serialized to writable - only expected in test code"); WritableUtils.writeVInt(out, VERSION.code); if (compressionContext == null) { Bytes.writeByteArray(out, this.encodedRegionName); @@ -290,6 +320,7 @@ public class HLogKey implements WritableComparable { // encodes the length of encodedRegionName. // If < 0 we just read the version and the next vint is the length. 
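The PB conversion helpers being added to HLogKey (the WALKey-based constructor above, plus getBuilder() and readFieldsFromPb() further down in this file) can be exercised in isolation. A small sketch under stated assumptions: the region and table values are made up, and only the four fields that getBuilder() always populates are set, assuming they satisfy the WALKey required-field check.

    import java.io.IOException;

    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey;
    import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
    import org.apache.hadoop.hbase.util.Bytes;

    import com.google.protobuf.ByteString;

    public class HLogKeyPbExample {
      public static void main(String[] args) throws IOException {
        // Hand-build the PB form of a key, then wrap it the way the new reader does.
        WALKey pbKey = WALKey.newBuilder()
            .setEncodedRegionName(ByteString.copyFrom(Bytes.toBytes("0123abc"))) // illustrative
            .setTableName(ByteString.copyFrom(Bytes.toBytes("testtable")))       // illustrative
            .setLogSequenceNumber(42L)
            .setWriteTime(System.currentTimeMillis())
            .build();

        // Uses readFieldsFromPb() internally; no compression context, so no uncompressor needed.
        HLogKey key = new HLogKey(pbKey);
        System.out.println(Bytes.toString(key.getTablename()) + " seq=" + key.getLogSeqNum());
      }
    }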
// @see Bytes#readByteArray(DataInput) + this.scopes = null; // writable HLogKey does not contain scopes int len = WritableUtils.readVInt(in); if (len < 0) { // what we just read was the version @@ -308,7 +339,7 @@ public class HLogKey implements WritableComparable { this.encodedRegionName = Compressor.readCompressed(in, compressionContext.regionDict); this.tablename = Compressor.readCompressed(in, compressionContext.tableDict); } - + this.logSeqNum = in.readLong(); this.writeTime = in.readLong(); this.clusterId = HConstants.DEFAULT_CLUSTER_ID; @@ -325,4 +356,62 @@ public class HLogKey implements WritableComparable { } } } + + public WALKey.Builder getBuilder( + WALCellCodec.ByteStringCompressor compressor) throws IOException { + WALKey.Builder builder = WALKey.newBuilder(); + if (compressionContext == null) { + builder.setEncodedRegionName(ByteString.copyFrom(this.encodedRegionName)); + builder.setTableName(ByteString.copyFrom(this.tablename)); + } else { + builder.setEncodedRegionName( + compressor.compress(this.encodedRegionName, compressionContext.regionDict)); + builder.setTableName(compressor.compress(this.tablename, compressionContext.tableDict)); + } + builder.setLogSequenceNumber(this.logSeqNum); + builder.setWriteTime(writeTime); + if (this.clusterId != HConstants.DEFAULT_CLUSTER_ID) { + builder.setClusterId(HBaseProtos.UUID.newBuilder() + .setLeastSigBits(this.clusterId.getLeastSignificantBits()) + .setMostSigBits(this.clusterId.getMostSignificantBits())); + } + if (scopes != null) { + for (Map.Entry e : scopes.entrySet()) { + ByteString family = (compressionContext == null) ? ByteString.copyFrom(e.getKey()) + : compressor.compress(e.getKey(), compressionContext.familyDict); + builder.addScopes(FamilyScope.newBuilder() + .setFamily(family).setScopeType(ScopeType.valueOf(e.getValue()))); + } + } + return builder; + } + + public void readFieldsFromPb( + WALKey walKey, WALCellCodec.ByteStringUncompressor uncompressor) throws IOException { + if (this.compressionContext != null) { + this.encodedRegionName = uncompressor.uncompress( + walKey.getEncodedRegionName(), compressionContext.regionDict); + this.tablename = uncompressor.uncompress( + walKey.getTableName(), compressionContext.tableDict); + } else { + this.encodedRegionName = walKey.getEncodedRegionName().toByteArray(); + this.tablename = walKey.getTableName().toByteArray(); + } + this.clusterId = HConstants.DEFAULT_CLUSTER_ID; + if (walKey.hasClusterId()) { + this.clusterId = new UUID( + walKey.getClusterId().getMostSigBits(), walKey.getClusterId().getLeastSigBits()); + } + this.scopes = null; + if (walKey.getScopesCount() > 0) { + this.scopes = new TreeMap(Bytes.BYTES_COMPARATOR); + for (FamilyScope scope : walKey.getScopesList()) { + byte[] family = (compressionContext == null) ? 
scope.getFamily().toByteArray() : + uncompressor.uncompress(scope.getFamily(), compressionContext.familyDict); + this.scopes.put(family, scope.getScopeType().getNumber()); + } + } + this.logSeqNum = walKey.getLogSequenceNumber(); + this.writeTime = walKey.getWriteTime(); + } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java index 45ac26597e1..01e13c88813 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogUtil.java @@ -36,30 +36,13 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.ServerName; -import org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.FSUtils; public class HLogUtil { static final Log LOG = LogFactory.getLog(HLogUtil.class); - @SuppressWarnings("unchecked") - public static Class getKeyClass(Configuration conf) { - return (Class) conf.getClass( - "hbase.regionserver.hlog.keyclass", HLogKey.class); - } - - public static HLogKey newKey(Configuration conf) throws IOException { - Class keyClass = getKeyClass(conf); - try { - return keyClass.newInstance(); - } catch (InstantiationException e) { - throw new IOException("cannot create hlog key"); - } catch (IllegalAccessException e) { - throw new IOException("cannot create hlog key"); - } - } - /** * Pattern used to validate a HLog file name */ @@ -76,52 +59,6 @@ public class HLogUtil { return pattern.matcher(filename).matches(); } - /* - * Get a reader for the WAL. - * - * @param fs - * - * @param path - * - * @param conf - * - * @return A WAL reader. Close when done with it. - * - * @throws IOException - * - * public static HLog.Reader getReader(final FileSystem fs, final Path path, - * Configuration conf) throws IOException { try { - * - * if (logReaderClass == null) { - * - * logReaderClass = conf.getClass("hbase.regionserver.hlog.reader.impl", - * SequenceFileLogReader.class, Reader.class); } - * - * - * HLog.Reader reader = logReaderClass.newInstance(); reader.init(fs, path, - * conf); return reader; } catch (IOException e) { throw e; } catch (Exception - * e) { throw new IOException("Cannot get log reader", e); } } - * - * * Get a writer for the WAL. - * - * @param path - * - * @param conf - * - * @return A WAL writer. Close when done with it. - * - * @throws IOException - * - * public static HLog.Writer createWriter(final FileSystem fs, final Path - * path, Configuration conf) throws IOException { try { if (logWriterClass == - * null) { logWriterClass = - * conf.getClass("hbase.regionserver.hlog.writer.impl", - * SequenceFileLogWriter.class, Writer.class); } FSHLog.Writer writer = - * (FSHLog.Writer) logWriterClass.newInstance(); writer.init(fs, path, conf); - * return writer; } catch (Exception e) { throw new - * IOException("cannot get log writer", e); } } - */ - /** * Construct the HLog directory name * @@ -285,11 +222,11 @@ public class HLogUtil { /** * Write the marker that a compaction has succeeded and is about to be committed. 
* This provides info to the HMaster to allow it to recover the compaction if - * this regionserver dies in the middle (This part is not yet implemented). It also prevents the compaction from - * finishing if this regionserver has already lost its lease on the log. + * this regionserver dies in the middle (This part is not yet implemented). It also prevents + * the compaction from finishing if this regionserver has already lost its lease on the log. */ - public static void writeCompactionMarker(HLog log, HTableDescriptor htd, HRegionInfo info, final CompactionDescriptor c) - throws IOException { + public static void writeCompactionMarker(HLog log, HTableDescriptor htd, HRegionInfo info, + final CompactionDescriptor c) throws IOException { WALEdit e = WALEdit.createCompaction(c); log.append(info, c.getTableName().toByteArray(), e, EnvironmentEdgeManager.currentTimeMillis(), htd); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java index 0f9743f70a0..d14610aec86 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/KeyValueCompression.java @@ -27,12 +27,15 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.io.WritableUtils; /** + * DO NOT USE. This class is deprecated and should only be used in pre-PB WAL. + * * Compression class for {@link KeyValue}s written to the WAL. This is not * synchronized, so synchronization should be handled outside. * * Class only compresses and uncompresses row keys, family names, and the * qualifier. More may be added depending on use patterns. */ +@Deprecated class KeyValueCompression { /** * Uncompresses a KeyValue from a DataInput and returns it. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java new file mode 100644 index 00000000000..12dbb972dcc --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java @@ -0,0 +1,173 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.hbase.regionserver.wal; + +import java.io.EOFException; +import java.io.IOException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.codec.Codec; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey; +import org.apache.hadoop.hbase.util.Bytes; + +import com.google.protobuf.InvalidProtocolBufferException; + +/** + * Reader for protobuf-based WAL. + */ +@InterfaceAudience.Private +public class ProtobufLogReader extends ReaderBase { + private static final Log LOG = LogFactory.getLog(ProtobufLogReader.class); + static final byte[] PB_WAL_MAGIC = Bytes.toBytes("PWAL"); + + private FSDataInputStream inputStream; + private Codec.Decoder cellDecoder; + private WALCellCodec.ByteStringUncompressor byteStringUncompressor; + private boolean hasCompression = false; + + public ProtobufLogReader() { + super(); + } + + @Override + public void close() throws IOException { + if (this.inputStream != null) { + this.inputStream.close(); + this.inputStream = null; + } + } + + @Override + public long getPosition() throws IOException { + return inputStream.getPos(); + } + + @Override + public void reset() throws IOException { + initInternal(null, false); + initAfterCompression(); // We need a new decoder (at least). + } + + @Override + protected void initReader(FSDataInputStream stream) throws IOException { + initInternal(stream, true); + } + + private void initInternal(FSDataInputStream stream, boolean isFirst) throws IOException { + close(); + long expectedPos = PB_WAL_MAGIC.length; + if (stream == null) { + stream = fs.open(path); + stream.seek(expectedPos); + } + if (stream.getPos() != expectedPos) { + throw new IOException("The stream is at invalid position: " + stream.getPos()); + } + // Initialize metadata or, when we reset, just skip the header. + WALProtos.WALHeader.Builder builder = WALProtos.WALHeader.newBuilder(); + boolean hasHeader = builder.mergeDelimitedFrom(stream); + if (!hasHeader) { + throw new EOFException("Couldn't read WAL PB header"); + } + if (isFirst) { + WALProtos.WALHeader header = builder.build(); + this.hasCompression = header.hasHasCompression() && header.getHasCompression(); + } + this.inputStream = stream; + + } + + @Override + protected void initAfterCompression() throws IOException { + WALCellCodec codec = new WALCellCodec(this.compressionContext); + this.cellDecoder = codec.getDecoder(this.inputStream); + if (this.hasCompression) { + this.byteStringUncompressor = codec.getByteStringUncompressor(); + } + } + + @Override + protected boolean hasCompression() { + return this.hasCompression; + } + + @Override + protected boolean readNext(HLog.Entry entry) throws IOException { + WALKey.Builder builder = WALKey.newBuilder(); + boolean hasNext = false; + try { + hasNext = builder.mergeDelimitedFrom(inputStream); + } catch (InvalidProtocolBufferException ipbe) { + LOG.error("Invalid PB while reading WAL, probably an unexpected EOF, ignoring", ipbe); + } + if (!hasNext) return false; + if (!builder.isInitialized()) { + // TODO: not clear if we should try to recover from corrupt PB that looks semi-legit. + // If we can get the KV count, we could, theoretically, try to get next record. 
+ LOG.error("Partial PB while reading WAL, probably an unexpected EOF, ignoring"); + return false; + } + WALKey walKey = builder.build(); + entry.getKey().readFieldsFromPb(walKey, this.byteStringUncompressor); + try { + int expectedCells = walKey.getFollowingKvCount(); + int actualCells = entry.getEdit().readFromCells(cellDecoder, expectedCells); + if (expectedCells != actualCells) { + throw new EOFException("Unable to read " + expectedCells + " cells, got " + actualCells); + } + } catch (EOFException ex) { + LOG.error("EOF while reading KVs, ignoring", ex); + return false; + } catch (Exception ex) { + IOException realEofEx = extractHiddenEofOrRethrow(ex); + LOG.error("EOF while reading KVs, ignoring", realEofEx); + return false; + } + return true; + } + + private IOException extractHiddenEofOrRethrow(Exception ex) throws IOException { + // There are two problems we are dealing with here. Hadoop stream throws generic exception + // for EOF, not EOFException; and scanner further hides it inside RuntimeException. + IOException ioEx = null; + if (ex instanceof IOException) { + ioEx = (IOException)ex; + } else if (ex instanceof RuntimeException + && ex.getCause() != null && ex.getCause() instanceof IOException) { + ioEx = (IOException)ex.getCause(); + } + if (ioEx != null) { + if (ioEx.getMessage().contains("EOF")) return ioEx; + throw ioEx; + } + throw new IOException(ex); + } + + @Override + protected void seekOnFs(long pos) throws IOException { + this.inputStream.seek(pos); + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java new file mode 100644 index 00000000000..3d81cc455d7 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java @@ -0,0 +1,130 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.regionserver.wal; + +import java.io.IOException; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.codec.Codec; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader; + +/** + * Writer for protobuf-based WAL. 
+ */ +@InterfaceAudience.Private +public class ProtobufLogWriter implements HLog.Writer { + private final Log LOG = LogFactory.getLog(this.getClass()); + private FSDataOutputStream output; + private Codec.Encoder cellEncoder; + private WALCellCodec.ByteStringCompressor compressor; + + + /** Context used by our wal dictionary compressor. + * Null if we're not to do our custom dictionary compression. */ + private CompressionContext compressionContext; + + public ProtobufLogWriter() { + super(); + } + + @Override + public void init(FileSystem fs, Path path, Configuration conf) throws IOException { + assert this.output == null; + boolean doCompress = conf.getBoolean(HConstants.ENABLE_WAL_COMPRESSION, false); + if (doCompress) { + try { + this.compressionContext = new CompressionContext(LRUDictionary.class); + } catch (Exception e) { + throw new IOException("Failed to initiate CompressionContext", e); + } + } + int bufferSize = fs.getConf().getInt("io.file.buffer.size", 4096); + short replication = (short)conf.getInt( + "hbase.regionserver.hlog.replication", fs.getDefaultReplication()); + long blockSize = conf.getLong("hbase.regionserver.hlog.blocksize", fs.getDefaultBlockSize()); + output = fs.create(path, true, bufferSize, replication, blockSize); + output.write(ProtobufLogReader.PB_WAL_MAGIC); + WALHeader.newBuilder().setHasCompression(doCompress).build().writeDelimitedTo(output); + + WALCellCodec codec = new WALCellCodec(this.compressionContext); + this.cellEncoder = codec.getEncoder(this.output); + if (doCompress) { + this.compressor = codec.getByteStringCompressor(); + } + LOG.debug("Writing protobuf WAL; path=" + path + ", compression=" + doCompress); + } + + @Override + public void append(HLog.Entry entry) throws IOException { + entry.setCompressionContext(compressionContext); + entry.getKey().getBuilder(compressor).setFollowingKvCount(entry.getEdit().size()) + .build().writeDelimitedTo(output); + for (KeyValue kv : entry.getEdit().getKeyValues()) { + // cellEncoder must assume little about the stream, since we write PB and cells in turn. + cellEncoder.write(kv); + } + } + + @Override + public void close() throws IOException { + if (this.output != null) { + try { + this.output.close(); + } catch (NullPointerException npe) { + // Can get a NPE coming up from down in DFSClient$DFSOutputStream#close + LOG.warn(npe); + } + this.output = null; + } + } + + @Override + public void sync() throws IOException { + try { + this.output.flush(); + this.output.sync(); + } catch (NullPointerException npe) { + // Concurrent close... + throw new IOException(npe); + } + } + + @Override + public long getLength() throws IOException { + try { + return this.output.getPos(); + } catch (NullPointerException npe) { + // Concurrent close... + throw new IOException(npe); + } + } + + public FSDataOutputStream getStream() { + return this.output; + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java new file mode 100644 index 00000000000..742a3ec23d5 --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ReaderBase.java @@ -0,0 +1,137 @@ +/** + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
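Taken together, ProtobufLogWriter.init() above and ProtobufLogReader.initInternal() earlier define the on-disk framing: the four PB_WAL_MAGIC bytes, a length-delimited WALHeader, then one length-delimited WALKey plus its cells per appended entry. A sketch of just the header handshake, using in-memory streams as a stand-in for the HDFS streams the real code uses:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALHeader;
    import org.apache.hadoop.hbase.util.Bytes;

    public class PbWalHeaderExample {
      private static final byte[] PB_WAL_MAGIC = Bytes.toBytes("PWAL");

      public static void main(String[] args) throws IOException {
        // Write side: magic bytes, then a length-delimited WALHeader.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        out.write(PB_WAL_MAGIC);
        WALHeader.newBuilder().setHasCompression(false).build().writeDelimitedTo(out);
        // ... entries would follow: a delimited WALKey, then getFollowingKvCount() cells each.

        // Read side: skip the magic, then parse the delimited header.
        ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
        long skipped = in.skip(PB_WAL_MAGIC.length);
        WALHeader.Builder builder = WALHeader.newBuilder();
        if (skipped != PB_WAL_MAGIC.length || !builder.mergeDelimitedFrom(in)) {
          throw new IOException("Couldn't read WAL PB header");
        }
        System.out.println("compression=" + builder.build().getHasCompression());
      }
    }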
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hbase.regionserver.wal; + +import java.io.IOException; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +@InterfaceAudience.Private +public abstract class ReaderBase implements HLog.Reader { + protected Configuration conf; + protected FileSystem fs; + protected Path path; + protected long edit = 0; + /** + * Compression context to use reading. Can be null if no compression. + */ + protected CompressionContext compressionContext = null; + protected boolean emptyCompressionContext = true; + + /** + * Default constructor. + */ + public ReaderBase() { + } + + @Override + public void init(FileSystem fs, Path path, Configuration conf, FSDataInputStream stream) + throws IOException { + this.conf = conf; + this.path = path; + this.fs = fs; + + initReader(stream); + + boolean compression = hasCompression(); + if (compression) { + // If compression is enabled, new dictionaries are created here. + try { + if (compressionContext == null) { + compressionContext = new CompressionContext(LRUDictionary.class); + } else { + compressionContext.clear(); + } + } catch (Exception e) { + throw new IOException("Failed to initialize CompressionContext", e); + } + } + initAfterCompression(); + } + + @Override + public HLog.Entry next() throws IOException { + return next(null); + } + + @Override + public HLog.Entry next(HLog.Entry reuse) throws IOException { + HLog.Entry e = reuse; + if (e == null) { + e = new HLog.Entry(new HLogKey(), new WALEdit()); + } + if (compressionContext != null) { + e.setCompressionContext(compressionContext); + } + + boolean hasEntry = readNext(e); + edit++; + if (compressionContext != null && emptyCompressionContext) { + emptyCompressionContext = false; + } + return hasEntry ? e : null; + } + + + @Override + public void seek(long pos) throws IOException { + if (compressionContext != null && emptyCompressionContext) { + while (next() != null) { + if (getPosition() == pos) { + emptyCompressionContext = false; + break; + } + } + } + seekOnFs(pos); + } + + /** + * Initializes the log reader with a particular stream (may be null). + * Reader assumes ownership of the stream if not null and may use it. Called once. + */ + protected abstract void initReader(FSDataInputStream stream) throws IOException; + + /** + * Initializes the compression after the shared stuff has been initialized. Called once. + */ + protected abstract void initAfterCompression() throws IOException; + /** + * @return Whether compression is enabled for this log. + */ + protected abstract boolean hasCompression(); + + /** + * Read next entry. + * @param e The entry to read into. + * @return Whether there was anything to read. 
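From a caller's point of view, the ReaderBase hierarchy is consumed through the HLog.Reader next()/close() loop, with HLogFactory picking the concrete reader by probing the file. A usage sketch, with the assumptions that a null CancelableProgressable is acceptable here and that the WAL path arrives on the command line:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.regionserver.wal.HLog;
    import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
    import org.apache.hadoop.hbase.util.Bytes;

    public class DumpWal {
      public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        FileSystem fs = FileSystem.get(conf);
        Path walPath = new Path(args[0]);
        // createReader() probes the magic bytes and hands back either a
        // ProtobufLogReader or a legacy SequenceFileLogReader.
        HLog.Reader reader = HLogFactory.createReader(fs, walPath, conf, null);
        try {
          for (HLog.Entry entry = reader.next(); entry != null; entry = reader.next()) {
            System.out.println(Bytes.toString(entry.getKey().getTablename())
                + " seq=" + entry.getKey().getLogSeqNum()
                + " kvs=" + entry.getEdit().size());
          }
        } finally {
          reader.close();
        }
      }
    }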
+ */ + protected abstract boolean readNext(HLog.Entry e) throws IOException; + + /** + * Performs a filesystem-level seek to a certain position in an underlying file. + */ + protected abstract void seekOnFs(long pos) throws IOException; + +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java index fea92a3555e..d2bfae587f1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogReader.java @@ -23,6 +23,8 @@ import java.io.FilterInputStream; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Method; +import java.util.NavigableMap; + import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -31,12 +33,24 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.regionserver.wal.HLog.Entry; import org.apache.hadoop.io.SequenceFile; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.SequenceFile.Metadata; @InterfaceAudience.Private -public class SequenceFileLogReader implements HLog.Reader { +public class SequenceFileLogReader extends ReaderBase { private static final Log LOG = LogFactory.getLog(SequenceFileLogReader.class); + // Legacy stuff from pre-PB WAL metadata. + private static final Text WAL_VERSION_KEY = new Text("version"); + // Let the version be 1. Let absence of a version meta tag be old, version 0. + // Set this version '1' to be the version that introduces compression, + // the COMPRESSION_VERSION. + private static final int COMPRESSION_VERSION = 1; + private static final Text WAL_COMPRESSION_TYPE_KEY = new Text("compression.type"); + private static final Text DICTIONARY_COMPRESSION_TYPE = new Text("dictionary"); + /** * Hack just to set the correct file length up in SequenceFile.Reader. * See HADOOP-6307. The below is all about setting the right length on the @@ -49,7 +63,7 @@ public class SequenceFileLogReader implements HLog.Reader { * this.end = in.getPos() + length; * */ - static class WALReader extends SequenceFile.Reader { + private static class WALReader extends SequenceFile.Reader { WALReader(final FileSystem fs, final Path p, final Configuration c) throws IOException { @@ -64,15 +78,6 @@ public class SequenceFileLogReader implements HLog.Reader { bufferSize, length), length); } - /** - * Call this method after init() has been executed - * - * @return whether WAL compression is enabled - */ - public boolean isWALCompressionEnabled() { - return SequenceFileLogWriter.isWALCompressionEnabled(this.getMetadata()); - } - /** * Override just so can intercept first call to getPos. */ @@ -138,59 +143,12 @@ public class SequenceFileLogReader implements HLog.Reader { } } - Configuration conf; - WALReader reader; - FileSystem fs; + // Protected for tests. + protected SequenceFile.Reader reader; + long entryStart = 0; // needed for logging exceptions - // Needed logging exceptions - Path path; - int edit = 0; - long entryStart = 0; - boolean emptyCompressionContext = true; - /** - * Compression context to use reading. Can be null if no compression. - */ - protected CompressionContext compressionContext = null; - - protected Class keyClass; - - /** - * Default constructor. 
- */ public SequenceFileLogReader() { - } - - /** - * This constructor allows a specific HLogKey implementation to override that - * which would otherwise be chosen via configuration property. - * - * @param keyClass - */ - public SequenceFileLogReader(Class keyClass) { - this.keyClass = keyClass; - } - - @Override - public void init(FileSystem fs, Path path, Configuration conf) - throws IOException { - this.conf = conf; - this.path = path; - reader = new WALReader(fs, path, conf); - this.fs = fs; - - // If compression is enabled, new dictionaries are created here. - boolean compression = reader.isWALCompressionEnabled(); - if (compression) { - try { - if (compressionContext == null) { - compressionContext = new CompressionContext(LRUDictionary.class); - } else { - compressionContext.clear(); - } - } catch (Exception e) { - throw new IOException("Failed to initialize CompressionContext", e); - } - } + super(); } @Override @@ -206,57 +164,70 @@ public class SequenceFileLogReader implements HLog.Reader { } @Override - public HLog.Entry next() throws IOException { - return next(null); + public long getPosition() throws IOException { + return reader != null ? reader.getPosition() : 0; } @Override - public HLog.Entry next(HLog.Entry reuse) throws IOException { - this.entryStart = this.reader.getPosition(); - HLog.Entry e = reuse; - if (e == null) { - HLogKey key; - if (keyClass == null) { - key = HLogUtil.newKey(conf); - } else { - try { - key = keyClass.newInstance(); - } catch (InstantiationException ie) { - throw new IOException(ie); - } catch (IllegalAccessException iae) { - throw new IOException(iae); - } - } + public void reset() throws IOException { + // Resetting the reader lets us see newly added data if the file is being written to + // We also keep the same compressionContext which was previously populated for this file + reader = new WALReader(fs, path, conf); + } - WALEdit val = new WALEdit(); - e = new HLog.Entry(key, val); + @Override + protected void initReader(FSDataInputStream stream) throws IOException { + // We don't use the stream because we have to have the magic stream above. + if (stream != null) { + stream.close(); } - boolean b = false; + reset(); + } + + @Override + protected void initAfterCompression() throws IOException { + // Nothing to do here + } + + @Override + protected boolean hasCompression() { + return isWALCompressionEnabled(reader.getMetadata()); + } + + /** + * Call this method after init() has been executed + * @return whether WAL compression is enabled + */ + static boolean isWALCompressionEnabled(final Metadata metadata) { + // Check version is >= VERSION? + Text txt = metadata.get(WAL_VERSION_KEY); + if (txt == null || Integer.parseInt(txt.toString()) < COMPRESSION_VERSION) { + return false; + } + // Now check that compression type is present. Currently only one value. 
+ txt = metadata.get(WAL_COMPRESSION_TYPE_KEY); + return txt != null && txt.equals(DICTIONARY_COMPRESSION_TYPE); + } + + + @Override + protected boolean readNext(Entry e) throws IOException { try { - if (compressionContext != null) { - e.setCompressionContext(compressionContext); + boolean hasNext = this.reader.next(e.getKey(), e.getEdit()); + if (!hasNext) return false; + // Scopes are probably in WAL edit, move to key + NavigableMap scopes = e.getEdit().getAndRemoveScopes(); + if (scopes != null) { + e.getKey().setScopes(scopes); } - b = this.reader.next(e.getKey(), e.getEdit()); + return true; } catch (IOException ioe) { throw addFileInfoToException(ioe); } - edit++; - if (compressionContext != null && emptyCompressionContext) { - emptyCompressionContext = false; - } - return b? e: null; } @Override - public void seek(long pos) throws IOException { - if (compressionContext != null && emptyCompressionContext) { - while (next() != null) { - if (getPosition() == pos) { - emptyCompressionContext = false; - break; - } - } - } + protected void seekOnFs(long pos) throws IOException { try { reader.seek(pos); } catch (IOException ioe) { @@ -264,11 +235,6 @@ public class SequenceFileLogReader implements HLog.Reader { } } - @Override - public long getPosition() throws IOException { - return reader != null ? reader.getPosition() : 0; - } - protected IOException addFileInfoToException(final IOException ioe) throws IOException { long pos = -1; @@ -301,11 +267,4 @@ public class SequenceFileLogReader implements HLog.Reader { return ioe; } - - @Override - public void reset() throws IOException { - // Resetting the reader lets us see newly added data if the file is being written to - // We also keep the same compressionContext which was previously populated for this file - reader = new WALReader(fs, path, conf); - } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java new file mode 100644 index 00000000000..245abba2ddc --- /dev/null +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java @@ -0,0 +1,335 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.regionserver.wal; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.codec.BaseDecoder; +import org.apache.hadoop.hbase.codec.BaseEncoder; +import org.apache.hadoop.hbase.codec.Codec; +import org.apache.hadoop.hbase.codec.KeyValueCodec; +import org.apache.hadoop.hbase.util.Bytes; + +import com.google.common.base.Preconditions; +import com.google.protobuf.ByteString; + + +/** + * Compression in this class is lifted off Compressor/KeyValueCompression. + * This is a pure coincidence... they are independent and don't have to be compatible. + */ +public class WALCellCodec implements Codec { + private final CompressionContext compression; + private final ByteStringUncompressor statelessUncompressor = new ByteStringUncompressor() { + @Override + public byte[] uncompress(ByteString data, Dictionary dict) throws IOException { + return WALCellCodec.uncompressByteString(data, dict); + } + }; + + public WALCellCodec(CompressionContext compression) { + this.compression = compression; + } + + public interface ByteStringCompressor { + ByteString compress(byte[] data, Dictionary dict) throws IOException; + } + + public interface ByteStringUncompressor { + byte[] uncompress(ByteString data, Dictionary dict) throws IOException; + } + + // TODO: it sucks that compression context is in HLog.Entry. It'd be nice if it was here. + // Dictionary could be gotten by enum; initially, based on enum, context would create + // an array of dictionaries. + static class BaosAndCompressor extends ByteArrayOutputStream implements ByteStringCompressor { + public ByteString toByteString() { + return ByteString.copyFrom(this.buf, 0, this.count); + } + + @Override + public ByteString compress(byte[] data, Dictionary dict) throws IOException { + writeCompressed(data, dict); + ByteString result = ByteString.copyFrom(this.buf, 0, this.count); + reset(); // Only resets the count - we reuse the byte array. + return result; + } + + private void writeCompressed(byte[] data, Dictionary dict) throws IOException { + assert dict != null; + short dictIdx = dict.findEntry(data, 0, data.length); + if (dictIdx == Dictionary.NOT_IN_DICTIONARY) { + write(Dictionary.NOT_IN_DICTIONARY); + StreamUtils.writeRawVInt32(this, data.length); + write(data, 0, data.length); + } else { + StreamUtils.writeShort(this, dictIdx); + } + } + } + + private static byte[] uncompressByteString(ByteString bs, Dictionary dict) throws IOException { + InputStream in = bs.newInput(); + byte status = (byte)in.read(); + if (status == Dictionary.NOT_IN_DICTIONARY) { + byte[] arr = new byte[StreamUtils.readRawVarint32(in)]; + int bytesRead = in.read(arr); + if (bytesRead != arr.length) { + throw new IOException("Cannot read; wanted " + arr.length + ", but got " + bytesRead); + } + if (dict != null) dict.addEntry(arr, 0, arr.length); + return arr; + } else { + // Status here is the higher-order byte of index of the dictionary entry. 
+ short dictIdx = StreamUtils.toShort(status, (byte)in.read()); + byte[] entry = dict.getEntry(dictIdx); + if (entry == null) { + throw new IOException("Missing dictionary entry for index " + dictIdx); + } + return entry; + } + } + + static class CompressedKvEncoder extends BaseEncoder { + private final CompressionContext compression; + public CompressedKvEncoder(OutputStream out, CompressionContext compression) { + super(out); + this.compression = compression; + } + + @Override + public void write(Cell cell) throws IOException { + if (!(cell instanceof KeyValue)) throw new IOException("Cannot write non-KV cells to WAL"); + KeyValue kv = (KeyValue)cell; + byte[] kvBuffer = kv.getBuffer(); + int offset = kv.getOffset(); + + // We first write the KeyValue infrastructure as VInts. + StreamUtils.writeRawVInt32(out, kv.getKeyLength()); + StreamUtils.writeRawVInt32(out, kv.getValueLength()); + + // Write row, qualifier, and family; use dictionary + // compression as they're likely to have duplicates. + write(kvBuffer, kv.getRowOffset(), kv.getRowLength(), compression.rowDict); + write(kvBuffer, kv.getFamilyOffset(), kv.getFamilyLength(), compression.familyDict); + write(kvBuffer, kv.getQualifierOffset(), kv.getQualifierLength(), compression.qualifierDict); + + // Write the rest uncompressed. + int pos = kv.getTimestampOffset(); + int remainingLength = kv.getLength() + offset - pos; + out.write(kvBuffer, pos, remainingLength); + + } + + private void write(byte[] data, int offset, int length, Dictionary dict) throws IOException { + short dictIdx = Dictionary.NOT_IN_DICTIONARY; + if (dict != null) { + dictIdx = dict.findEntry(data, offset, length); + } + if (dictIdx == Dictionary.NOT_IN_DICTIONARY) { + out.write(Dictionary.NOT_IN_DICTIONARY); + StreamUtils.writeRawVInt32(out, length); + out.write(data, offset, length); + } else { + StreamUtils.writeShort(out, dictIdx); + } + } + } + + static class CompressedKvDecoder extends BaseDecoder { + private final CompressionContext compression; + public CompressedKvDecoder(InputStream in, CompressionContext compression) { + super(in); + this.compression = compression; + } + + @Override + protected Cell parseCell() throws IOException { + int keylength = StreamUtils.readRawVarint32(in); + int vlength = StreamUtils.readRawVarint32(in); + int length = KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE + keylength + vlength; + + byte[] backingArray = new byte[length]; + int pos = 0; + pos = Bytes.putInt(backingArray, pos, keylength); + pos = Bytes.putInt(backingArray, pos, vlength); + + // the row + int elemLen = readIntoArray(backingArray, pos + Bytes.SIZEOF_SHORT, compression.rowDict); + checkLength(elemLen, Short.MAX_VALUE); + pos = Bytes.putShort(backingArray, pos, (short)elemLen); + pos += elemLen; + + // family + elemLen = readIntoArray(backingArray, pos + Bytes.SIZEOF_BYTE, compression.familyDict); + checkLength(elemLen, Byte.MAX_VALUE); + pos = Bytes.putByte(backingArray, pos, (byte)elemLen); + pos += elemLen; + + // qualifier + elemLen = readIntoArray(backingArray, pos, compression.qualifierDict); + pos += elemLen; + + // the rest + in.read(backingArray, pos, length - pos); + return new KeyValue(backingArray); + } + + private int readIntoArray(byte[] to, int offset, Dictionary dict) throws IOException { + byte status = (byte)in.read(); + if (status == Dictionary.NOT_IN_DICTIONARY) { + // status byte indicating that data to be read is not in dictionary. + // if this isn't in the dictionary, we need to add to the dictionary. 
+ int length = StreamUtils.readRawVarint32(in); + in.read(to, offset, length); + dict.addEntry(to, offset, length); + return length; + } else { + // the status byte also acts as the higher order byte of the dictionary entry. + short dictIdx = StreamUtils.toShort(status, (byte)in.read()); + byte[] entry = dict.getEntry(dictIdx); + if (entry == null) { + throw new IOException("Missing dictionary entry for index " + dictIdx); + } + // now we write the uncompressed value. + Bytes.putBytes(to, offset, entry, 0, entry.length); + return entry.length; + } + } + + private static void checkLength(int len, int max) throws IOException { + if (len < 0 || len > max) { + throw new IOException("Invalid length for compresesed portion of keyvalue: " + len); + } + } + } + + public class EnsureKvEncoder extends KeyValueCodec.KeyValueEncoder { + public EnsureKvEncoder(OutputStream out) { + super(out); + } + @Override + public void write(Cell cell) throws IOException { + if (!(cell instanceof KeyValue)) throw new IOException("Cannot write non-KV cells to WAL"); + super.write(cell); + } + } + + @Override + public Decoder getDecoder(InputStream is) { + return (compression == null) + ? new KeyValueCodec.KeyValueDecoder(is) : new CompressedKvDecoder(is, compression); + } + + @Override + public Encoder getEncoder(OutputStream os) { + return (compression == null) + ? new EnsureKvEncoder(os) : new CompressedKvEncoder(os, compression); + } + + public ByteStringCompressor getByteStringCompressor() { + // TODO: ideally this should also encapsulate compressionContext + return new BaosAndCompressor(); + } + + public ByteStringUncompressor getByteStringUncompressor() { + // TODO: ideally this should also encapsulate compressionContext + return this.statelessUncompressor; + } + + /** + * It seems like as soon as somebody sets himself to the task of creating VInt encoding, + * his mind blanks out for a split-second and he starts the work by wrapping it in the + * most convoluted interface he can come up with. Custom streams that allocate memory, + * DataOutput that is only used to write single bytes... We operate on simple streams. + * Thus, we are going to have a simple implementation copy-pasted from protobuf Coded*Stream. + */ + private static class StreamUtils { + public static int computeRawVarint32Size(final int value) { + if ((value & (0xffffffff << 7)) == 0) return 1; + if ((value & (0xffffffff << 14)) == 0) return 2; + if ((value & (0xffffffff << 21)) == 0) return 3; + if ((value & (0xffffffff << 28)) == 0) return 4; + return 5; + } + + static void writeRawVInt32(OutputStream output, int value) throws IOException { + assert value >= 0; + while (true) { + if ((value & ~0x7F) == 0) { + output.write(value); + return; + } else { + output.write((value & 0x7F) | 0x80); + value >>>= 7; + } + } + } + + static int readRawVarint32(InputStream input) throws IOException { + byte tmp = (byte)input.read(); + if (tmp >= 0) { + return tmp; + } + int result = tmp & 0x7f; + if ((tmp = (byte)input.read()) >= 0) { + result |= tmp << 7; + } else { + result |= (tmp & 0x7f) << 7; + if ((tmp = (byte)input.read()) >= 0) { + result |= tmp << 14; + } else { + result |= (tmp & 0x7f) << 14; + if ((tmp = (byte)input.read()) >= 0) { + result |= tmp << 21; + } else { + result |= (tmp & 0x7f) << 21; + result |= (tmp = (byte)input.read()) << 28; + if (tmp < 0) { + // Discard upper 32 bits. 
+ for (int i = 0; i < 5; i++) { + if (input.read() >= 0) { + return result; + } + } + throw new IOException("Malformed varint"); + } + } + } + } + return result; + } + + static short toShort(byte hi, byte lo) { + short s = (short) (((hi & 0xFF) << 8) | (lo & 0xFF)); + Preconditions.checkArgument(s >= 0); + return s; + } + + static void writeShort(OutputStream out, short v) throws IOException { + Preconditions.checkArgument(v >= 0); + out.write((byte)(0xff & (v >> 8))); + out.write((byte)(0xff & v)); + } + } +} diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java index f9b72f2316f..f5a47af914f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALEdit.java @@ -26,14 +26,19 @@ import java.util.List; import java.util.NavigableMap; import java.util.TreeMap; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.io.HeapSize; -import org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor; +import org.apache.hadoop.hbase.Cell; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassSize; import org.apache.hadoop.io.Writable; + /** * WALEdit: Used in HBase's transaction log (WAL) to represent * the collection of edits (KeyValue objects) corresponding to a @@ -70,15 +75,19 @@ import org.apache.hadoop.io.Writable; */ @InterfaceAudience.Private public class WALEdit implements Writable, HeapSize { - // TODO: Make it so user cannot make a cf w/ this name. Make the illegal cf names. Ditto for row. 
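The element encoding shared by CompressedKvEncoder and BaosAndCompressor above boils down to: a 0xFF status byte, a varint length and the literal bytes when the value is not yet in the dictionary, or a two-byte dictionary index otherwise, with the status byte doubling as the high-order index byte. A toy, dependency-free sketch of that wire format follows; the class and method names here are invented, and the real code goes through Dictionary/LRUDictionary and StreamUtils.

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    /** Toy model of the WALCellCodec wire format for one dictionary-compressed element. */
    public class DictFormatSketch {
      static final byte NOT_IN_DICTIONARY = -1; // same sentinel as Dictionary.NOT_IN_DICTIONARY

      static void writeElement(OutputStream out, byte[] data, short dictIdx) throws IOException {
        if (dictIdx == NOT_IN_DICTIONARY) {
          out.write(NOT_IN_DICTIONARY);      // status byte: a literal follows
          writeRawVInt32(out, data.length);  // varint length, as in StreamUtils.writeRawVInt32
          out.write(data);
        } else {
          out.write((byte) (0xff & (dictIdx >> 8))); // status byte = high-order index byte
          out.write((byte) (0xff & dictIdx));
        }
      }

      static void writeRawVInt32(OutputStream out, int value) throws IOException {
        while ((value & ~0x7F) != 0) {
          out.write((value & 0x7F) | 0x80);
          value >>>= 7;
        }
        out.write(value);
      }

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] family = "colfam".getBytes();
        writeElement(out, family, NOT_IN_DICTIONARY); // first occurrence: 1 + 1 + 6 bytes
        writeElement(out, family, (short) 0);         // later occurrences: 2-byte index
        System.out.println("bytes used: " + out.size()); // 10
      }
    }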
+ public static final Log LOG = LogFactory.getLog(WALEdit.class); + + // TODO: Get rid of this; see HBASE-8457 public static final byte [] METAFAMILY = Bytes.toBytes("METAFAMILY"); static final byte [] METAROW = Bytes.toBytes("METAROW"); static final byte[] COMPLETE_CACHE_FLUSH = Bytes.toBytes("HBASE::CACHEFLUSH"); static final byte[] COMPACTION = Bytes.toBytes("HBASE::COMPACTION"); - private final int VERSION_2 = -1; private final ArrayList kvs = new ArrayList(); + + // Only here for legacy writable deserialization + @Deprecated private NavigableMap scopes; private CompressionContext compressionContext; @@ -115,15 +124,10 @@ public class WALEdit implements Writable, HeapSize { return kvs; } - public NavigableMap getScopes() { - return scopes; - } - - - public void setScopes (NavigableMap scopes) { - // We currently process the map outside of WALEdit, - // TODO revisit when replication is part of core - this.scopes = scopes; + public NavigableMap getAndRemoveScopes() { + NavigableMap result = scopes; + scopes = null; + return result; } public void readFields(DataInput in) throws IOException { @@ -141,7 +145,7 @@ public class WALEdit implements Writable, HeapSize { this.add(KeyValueCompression.readKV(in, compressionContext)); } else { this.add(KeyValue.create(in)); - } + } } int numFamilies = in.readInt(); if (numFamilies > 0) { @@ -159,10 +163,10 @@ public class WALEdit implements Writable, HeapSize { // read is actually the length of a single KeyValue this.add(KeyValue.create(versionOrLength, in)); } - } public void write(DataOutput out) throws IOException { + LOG.warn("WALEdit is being serialized to writable - only expected in test code"); out.writeInt(VERSION_2); out.writeInt(kvs.size()); // We interleave the two lists for code simplicity @@ -184,6 +188,24 @@ public class WALEdit implements Writable, HeapSize { } } + /** + * Reads WALEdit from cells. + * @param cellDecoder Cell decoder. + * @param expectedCount Expected cell count. + * @return Number of KVs read. 
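readFromCells() is the read-side counterpart of ProtobufLogWriter.append(), which writes size() cells through the codec after the WALKey. A round-trip sketch using the uncompressed path (null compression context); the KeyValue contents are made up:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.hbase.KeyValue;
    import org.apache.hadoop.hbase.codec.Codec;
    import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;
    import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
    import org.apache.hadoop.hbase.util.Bytes;

    public class WalEditCodecExample {
      public static void main(String[] args) throws IOException {
        // Null compression context: plain KeyValueCodec encoding underneath.
        WALCellCodec codec = new WALCellCodec(null);

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        Codec.Encoder encoder = codec.getEncoder(bos);
        KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"),
            Bytes.toBytes("q"), System.currentTimeMillis(), Bytes.toBytes("value"));
        encoder.write(kv);
        encoder.flush();

        // Mirrors what ProtobufLogReader does once the WALKey says how many cells follow.
        Codec.Decoder decoder = codec.getDecoder(new ByteArrayInputStream(bos.toByteArray()));
        WALEdit edit = new WALEdit();
        int read = edit.readFromCells(decoder, 1);
        System.out.println("cells read: " + read + ", kvs=" + edit.getKeyValues().size());
      }
    }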
+ */ + public int readFromCells(Codec.Decoder cellDecoder, int expectedCount) throws IOException { + kvs.clear(); + while (kvs.size() < expectedCount && cellDecoder.advance()) { + Cell cell = cellDecoder.current(); + if (!(cell instanceof KeyValue)) { + throw new IOException("WAL edit only supports KVs as cells"); + } + kvs.add((KeyValue)cell); + } + return kvs.size(); + } + public long heapSize() { long ret = 0; for (KeyValue kv : kvs) { @@ -234,4 +256,5 @@ public class WALEdit implements Writable, HeapSize { } return null; } -} \ No newline at end of file +} + diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java index 71aa2c76ea8..0d26e241c1d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/Replication.java @@ -215,7 +215,7 @@ public class Replication implements WALActionsListener, } } if (!scopes.isEmpty()) { - logEdit.setScopes(scopes); + logKey.setScopes(scopes); } } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java index 398501a0143..cb605711809 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/regionserver/ReplicationSource.java @@ -491,7 +491,7 @@ public class ReplicationSource extends Thread HLogKey logKey = entry.getKey(); // don't replicate if the log entries originated in the peer if (!logKey.getClusterId().equals(peerClusterId)) { - removeNonReplicableEdits(edit); + removeNonReplicableEdits(entry); // Don't replicate catalog entries, if the WALEdit wasn't // containing anything to replicate and if we're currently not set to replicate if (!(Bytes.equals(logKey.getTablename(), HConstants.ROOT_TABLE_NAME) || @@ -666,12 +666,12 @@ public class ReplicationSource extends Thread /** * We only want KVs that are scoped other than local - * @param edit The KV to check for replication + * @param entry The entry to check for replication */ - protected void removeNonReplicableEdits(WALEdit edit) { - NavigableMap scopes = edit.getScopes(); - List kvs = edit.getKeyValues(); - for (int i = edit.size()-1; i >= 0; i--) { + protected void removeNonReplicableEdits(HLog.Entry entry) { + NavigableMap scopes = entry.getKey().getScopes(); + List kvs = entry.getEdit().getKeyValues(); + for (int i = kvs.size()-1; i >= 0; i--) { KeyValue kv = kvs.get(i); // The scope will be null or empty if // there's nothing to replicate in that WALEdit diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java index 57e04f28a14..485b22c82a1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHLogRecordReader.java @@ -110,34 +110,25 @@ public class TestHLogRecordReader { */ @Test public void testPartialRead() throws Exception { - HLog log = HLogFactory.createHLog(fs, hbaseDir, - logName, conf); + HLog log = HLogFactory.createHLog(fs, hbaseDir, logName, conf); long ts = System.currentTimeMillis(); WALEdit 
     edit = new WALEdit();
-    edit.add(new KeyValue(rowName, family, Bytes.toBytes("1"),
-        ts, value));
-    log.append(info, tableName, edit,
-        ts, htd);
+    edit.add(new KeyValue(rowName, family, Bytes.toBytes("1"), ts, value));
+    log.append(info, tableName, edit, ts, htd);
     edit = new WALEdit();
-    edit.add(new KeyValue(rowName, family, Bytes.toBytes("2"),
-        ts+1, value));
-    log.append(info, tableName, edit,
-        ts+1, htd);
+    edit.add(new KeyValue(rowName, family, Bytes.toBytes("2"), ts+1, value));
+    log.append(info, tableName, edit, ts+1, htd);
     log.rollWriter();
     Thread.sleep(1);
     long ts1 = System.currentTimeMillis();
     edit = new WALEdit();
-    edit.add(new KeyValue(rowName, family, Bytes.toBytes("3"),
-        ts1+1, value));
-    log.append(info, tableName, edit,
-        ts1+1, htd);
+    edit.add(new KeyValue(rowName, family, Bytes.toBytes("3"), ts1+1, value));
+    log.append(info, tableName, edit, ts1+1, htd);
     edit = new WALEdit();
-    edit.add(new KeyValue(rowName, family, Bytes.toBytes("4"),
-        ts1+2, value));
-    log.append(info, tableName, edit,
-        ts1+2, htd);
+    edit.add(new KeyValue(rowName, family, Bytes.toBytes("4"), ts1+2, value));
+    log.append(info, tableName, edit, ts1+2, htd);
     log.close();

     HLogInputFormat input = new HLogInputFormat();
@@ -229,8 +220,11 @@ public class TestHLogRecordReader {

     for (byte[] column : columns) {
       assertTrue(reader.nextKeyValue());
-      assertTrue(Bytes
-          .equals(column, reader.getCurrentValue().getKeyValues().get(0).getQualifier()));
+      KeyValue kv = reader.getCurrentValue().getKeyValues().get(0);
+      if (!Bytes.equals(column, kv.getQualifier())) {
+        assertTrue("expected [" + Bytes.toString(column) + "], actual ["
+            + Bytes.toString(kv.getQualifier()) + "]", false);
+      }
     }
     assertFalse(reader.nextKeyValue());
     reader.close();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index a40b71f369d..57f49060906 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -84,7 +84,7 @@ import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.monitoring.TaskMonitor;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.WAL.CompactionDescriptor;
+import org.apache.hadoop.hbase.protobuf.generated.WALProtos.CompactionDescriptor;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
 import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/FaultySequenceFileLogReader.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/FaultySequenceFileLogReader.java
index b79e4385a02..2164a435818 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/FaultySequenceFileLogReader.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/FaultySequenceFileLogReader.java
@@ -41,14 +41,12 @@ public class FaultySequenceFileLogReader extends SequenceFileLogReader {

   @Override
   public HLog.Entry next(HLog.Entry reuse) throws IOException {
-    this.entryStart = this.reader.getPosition();
+    this.entryStart = this.getPosition();
     boolean b = true;

     if (nextQueue.isEmpty()) { // Read the whole thing at once and fake reading
       while (b == true) {
-        HLogKey key = HLogUtil.newKey(conf);
-        WALEdit val = new WALEdit();
-        HLog.Entry e = new HLog.Entry(key, val);
+        HLog.Entry e = new HLog.Entry(new HLogKey(), new WALEdit());
         if (compressionContext != null) {
           e.setCompressionContext(compressionContext);
         }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedSequenceFileLogWriter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedSequenceFileLogWriter.java
index 1e669a4cf35..d240e66afbc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedSequenceFileLogWriter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/InstrumentedSequenceFileLogWriter.java
@@ -22,12 +22,12 @@ import java.io.IOException;

 import org.apache.hadoop.hbase.util.Bytes;

-public class InstrumentedSequenceFileLogWriter extends SequenceFileLogWriter {
+public class InstrumentedSequenceFileLogWriter extends ProtobufLogWriter {

   public InstrumentedSequenceFileLogWriter() {
-    super(HLogKey.class);
+    super();
   }
-
+
   public static boolean activateFailure = false;
   @Override
   public void append(HLog.Entry entry) throws IOException {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
similarity index 81%
rename from hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
rename to hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
index 88f0ebf393f..721b87c11f5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/SequenceFileLogWriter.java
@@ -41,20 +41,10 @@ import org.apache.hadoop.io.compress.DefaultCodec;

 /**
  * Implementation of {@link HLog.Writer} that delegates to
- * SequenceFile.Writer.
+ * SequenceFile.Writer. Legacy implementation only used for compat tests.
  */
 @InterfaceAudience.Private
 public class SequenceFileLogWriter implements HLog.Writer {
-  static final Text WAL_VERSION_KEY = new Text("version");
-  // Let the version be 1. Let absence of a version meta tag be old, version 0.
-  // Set this version '1' to be the version that introduces compression,
-  // the COMPRESSION_VERSION.
-  private static final int COMPRESSION_VERSION = 1;
-  static final int VERSION = COMPRESSION_VERSION;
-  static final Text WAL_VERSION = new Text("" + VERSION);
-  static final Text WAL_COMPRESSION_TYPE_KEY = new Text("compression.type");
-  static final Text DICTIONARY_COMPRESSION_TYPE = new Text("dictionary");
-
   private final Log LOG = LogFactory.getLog(this.getClass());
   // The sequence file we delegate to.
   private SequenceFile.Writer writer;
@@ -62,8 +52,11 @@ public class SequenceFileLogWriter implements HLog.Writer {
   // in the SequenceFile.Writer 'writer' instance above.
   private FSDataOutputStream writer_out;

-  private Class keyClass;
-
+  // Legacy stuff from pre-PB WAL metadata.
+  private static final Text WAL_VERSION_KEY = new Text("version");
+  private static final Text WAL_COMPRESSION_TYPE_KEY = new Text("compression.type");
+  private static final Text DICTIONARY_COMPRESSION_TYPE = new Text("dictionary");
+
   /**
    * Context used by our wal dictionary compressor. Null if we're not to do
    * our custom dictionary compression. This custom WAL compression is distinct
@@ -78,16 +71,6 @@ public class SequenceFileLogWriter implements HLog.Writer {
     super();
   }

-  /**
-   * This constructor allows a specific HLogKey implementation to override that
-   * which would otherwise be chosen via configuration property.
-   *
-   * @param keyClass
-   */
-  public SequenceFileLogWriter(Class keyClass) {
-    this.keyClass = keyClass;
-  }
-
   /**
    * Create sequence file Metadata for our WAL file with version and compression
    * type (if any).
@@ -98,7 +81,7 @@ public class SequenceFileLogWriter implements HLog.Writer {
   private static Metadata createMetadata(final Configuration conf,
       final boolean compress) {
     TreeMap metaMap = new TreeMap();
-    metaMap.put(WAL_VERSION_KEY, WAL_VERSION);
+    metaMap.put(WAL_VERSION_KEY, new Text("1"));
     if (compress) {
       // Currently we only do one compression type.
       metaMap.put(WAL_COMPRESSION_TYPE_KEY, DICTIONARY_COMPRESSION_TYPE);
@@ -106,22 +89,6 @@ public class SequenceFileLogWriter implements HLog.Writer {
     return new Metadata(metaMap);
   }

-  /**
-   * Call this method after init() has been executed
-   *
-   * @return whether WAL compression is enabled
-   */
-  static boolean isWALCompressionEnabled(final Metadata metadata) {
-    // Check version is >= VERSION?
-    Text txt = metadata.get(WAL_VERSION_KEY);
-    if (txt == null || Integer.parseInt(txt.toString()) < COMPRESSION_VERSION) {
-      return false;
-    }
-    // Now check that compression type is present. Currently only one value.
-    txt = metadata.get(WAL_COMPRESSION_TYPE_KEY);
-    return txt != null && txt.equals(DICTIONARY_COMPRESSION_TYPE);
-  }
-
   @Override
   public void init(FileSystem fs, Path path, Configuration conf)
   throws IOException {
@@ -139,10 +106,6 @@ public class SequenceFileLogWriter implements HLog.Writer {
       }
     }

-    if (null == keyClass) {
-      keyClass = HLogUtil.getKeyClass(conf);
-    }
-
     // Create a SF.Writer instance.
     try {
       // reflection for a version of SequenceFile.createWriter that doesn't
@@ -152,8 +115,7 @@ public class SequenceFileLogWriter implements HLog.Writer {
           Configuration.class, Path.class, Class.class, Class.class,
           Integer.TYPE, Short.TYPE, Long.TYPE, Boolean.TYPE,
           CompressionType.class, CompressionCodec.class, Metadata.class})
-        .invoke(null, new Object[] {fs, conf, path, HLogUtil.getKeyClass(conf),
-            WALEdit.class,
+        .invoke(null, new Object[] {fs, conf, path, HLogKey.class, WALEdit.class,
           Integer.valueOf(fs.getConf().getInt("io.file.buffer.size", 4096)),
           Short.valueOf((short)
             conf.getInt("hbase.regionserver.hlog.replication",
@@ -175,7 +137,7 @@ public class SequenceFileLogWriter implements HLog.Writer {
     if (this.writer == null) {
       LOG.debug("new createWriter -- HADOOP-6840 -- not available");
       this.writer = SequenceFile.createWriter(fs, conf, path,
-        HLogUtil.getKeyClass(conf), WALEdit.class,
+        HLogKey.class, WALEdit.class,
         fs.getConf().getInt("io.file.buffer.size", 4096),
         (short) conf.getInt("hbase.regionserver.hlog.replication",
           fs.getDefaultReplication()),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
index 3c24a22aad2..e025d7d2689 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLog.java
@@ -754,6 +754,73 @@ public class TestHLog {
         log.append(hri, tableName, cols, timestamp, htd);
       }
     }
+
+
+  /**
+   * @throws IOException
+   */
+  @Test
+  public void testReadLegacyLog() throws IOException {
+    final int columnCount = 5;
+    final int recordCount = 5;
+    final byte[] tableName = Bytes.toBytes("tablename");
+    final byte[] row = Bytes.toBytes("row");
+    long timestamp = System.currentTimeMillis();
+    Path path = new Path(dir, "temphlog");
+    SequenceFileLogWriter sflw = null;
+    HLog.Reader reader = null;
+    try {
+      HRegionInfo hri = new HRegionInfo(tableName,
+          HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
+      HTableDescriptor htd = new HTableDescriptor(tableName);
+      fs.mkdirs(dir);
+      // Write log in pre-PB format.
+      sflw = new SequenceFileLogWriter();
+      sflw.init(fs, path, conf);
+      for (int i = 0; i < recordCount; ++i) {
+        HLogKey key = new HLogKey(
+            hri.getEncodedNameAsBytes(), tableName, i, timestamp, HConstants.DEFAULT_CLUSTER_ID);
+        WALEdit edit = new WALEdit();
+        for (int j = 0; j < columnCount; ++j) {
+          if (i == 0) {
+            htd.addFamily(new HColumnDescriptor("column" + j));
+          }
+          String value = i + "" + j;
+          edit.add(new KeyValue(row, row, row, timestamp, Bytes.toBytes(value)));
+        }
+        sflw.append(new HLog.Entry(key, edit));
+      }
+      sflw.sync();
+      sflw.close();
+
+      // Now read the log using standard means.
+      reader = HLogFactory.createReader(fs, path, conf);
+      assertTrue(reader instanceof SequenceFileLogReader);
+      for (int i = 0; i < recordCount; ++i) {
+        HLog.Entry entry = reader.next();
+        assertNotNull(entry);
+        assertEquals(columnCount, entry.getEdit().size());
+        assertArrayEquals(hri.getEncodedNameAsBytes(), entry.getKey().getEncodedRegionName());
+        assertArrayEquals(tableName, entry.getKey().getTablename());
+        int idx = 0;
+        for (KeyValue val : entry.getEdit().getKeyValues()) {
+          assertTrue(Bytes.equals(row, val.getRow()));
+          String value = i + "" + idx;
+          assertArrayEquals(Bytes.toBytes(value), val.getValue());
+          idx++;
+        }
+      }
+      HLog.Entry entry = reader.next();
+      assertNull(entry);
+    } finally {
+      if (sflw != null) {
+        sflw.close();
+      }
+      if (reader != null) {
+        reader.close();
+      }
+    }
+  }

   static class DumbWALActionsListener implements WALActionsListener {
     int increments = 0;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
index 28586791e5f..9438fb929db 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java
@@ -652,7 +652,7 @@ public class TestHLogSplit {
     int actualCount = 0;
     HLog.Reader in = HLogFactory.createReader(fs, splitLog, conf);
     @SuppressWarnings("unused")
-    HLog.Entry entry;
+    HLog.Entry entry;
     while ((entry = in.next()) != null) ++actualCount;
     assertEquals(entryCount-1, actualCount);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
index 819721c83be..86047add863 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALActionsListener.java
@@ -100,8 +100,7 @@ public class TestWALActionsListener {
       HTableDescriptor htd = new HTableDescriptor();
       htd.addFamily(new HColumnDescriptor(b));

-      HLogKey key = new HLogKey(b,b, 0, 0, HConstants.DEFAULT_CLUSTER_ID);
-      hlog.append(hri, key, edit, htd, true);
+      hlog.append(hri, b, edit, 0, htd);
       if (i == 10) {
         hlog.registerWALActionsListener(laterobserver);
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
index b79cf91d047..0ffd132413c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
@@ -827,7 +827,7 @@ public class TestWALReplay {
         "The sequence number of the recoverd.edits and the current edit seq should be same",
         lastestSeqNumber, editCount);
   }
-
+
   static class MockHLog extends FSHLog {
     boolean doCompleteCacheFlush = false;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
index eef6d9252e4..e6a514e2d93 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestReplicationSourceManager.java
@@ -191,7 +191,7 @@ public class TestReplicationSourceManager {
       LOG.info(i);
       HLogKey key = new HLogKey(hri.getRegionName(), test, seq++,
           System.currentTimeMillis(), HConstants.DEFAULT_CLUSTER_ID);
-      hlog.append(hri, key, edit, htd, true);
+      hlog.append(hri, test, edit, System.currentTimeMillis(), htd);
     }

     // Simulate a rapid insert that's followed
@@ -202,9 +202,7 @@ public class TestReplicationSourceManager {
     LOG.info(baseline + " and " + time);

     for (int i = 0; i < 3; i++) {
-      HLogKey key = new HLogKey(hri.getRegionName(), test, seq++,
-          System.currentTimeMillis(), HConstants.DEFAULT_CLUSTER_ID);
-      hlog.append(hri, key, edit, htd, true);
+      hlog.append(hri, test, edit, System.currentTimeMillis(), htd);
     }

     assertEquals(6, manager.getHLogs().get(slaveId).size());
@@ -214,9 +212,7 @@ public class TestReplicationSourceManager {
     manager.logPositionAndCleanOldLogs(manager.getSources().get(0).getCurrentPath(),
         "1", 0, false, false);

-    HLogKey key = new HLogKey(hri.getRegionName(), test, seq++,
-        System.currentTimeMillis(), HConstants.DEFAULT_CLUSTER_ID);
-    hlog.append(hri, key, edit, htd, true);
+    hlog.append(hri, test, edit, System.currentTimeMillis(), htd);

     assertEquals(1, manager.getHLogs().size());