diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProcedureProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProcedureProtos.java
index 98260c1f95f..4713a0a3865 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProcedureProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/MasterProcedureProtos.java
@@ -371,6 +371,342 @@ public final class MasterProcedureProtos {
// @@protoc_insertion_point(enum_scope:DeleteTableState)
}
+ /**
+ * Protobuf enum {@code AddColumnFamilyState}
+ */
+ public enum AddColumnFamilyState
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * ADD_COLUMN_FAMILY_PREPARE = 1;
+ */
+ ADD_COLUMN_FAMILY_PREPARE(0, 1),
+ /**
+ * ADD_COLUMN_FAMILY_PRE_OPERATION = 2;
+ */
+ ADD_COLUMN_FAMILY_PRE_OPERATION(1, 2),
+ /**
+ * ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3;
+ */
+ ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR(2, 3),
+ /**
+ * ADD_COLUMN_FAMILY_POST_OPERATION = 4;
+ */
+ ADD_COLUMN_FAMILY_POST_OPERATION(3, 4),
+ /**
+ * ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 5;
+ */
+ ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS(4, 5),
+ ;
+
+ /**
+ * ADD_COLUMN_FAMILY_PREPARE = 1;
+ */
+ public static final int ADD_COLUMN_FAMILY_PREPARE_VALUE = 1;
+ /**
+ * ADD_COLUMN_FAMILY_PRE_OPERATION = 2;
+ */
+ public static final int ADD_COLUMN_FAMILY_PRE_OPERATION_VALUE = 2;
+ /**
+ * ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3;
+ */
+ public static final int ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR_VALUE = 3;
+ /**
+ * ADD_COLUMN_FAMILY_POST_OPERATION = 4;
+ */
+ public static final int ADD_COLUMN_FAMILY_POST_OPERATION_VALUE = 4;
+ /**
+ * ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 5;
+ */
+ public static final int ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS_VALUE = 5;
+
+
+ public final int getNumber() { return value; }
+
+ public static AddColumnFamilyState valueOf(int value) {
+ switch (value) {
+ case 1: return ADD_COLUMN_FAMILY_PREPARE;
+ case 2: return ADD_COLUMN_FAMILY_PRE_OPERATION;
+ case 3: return ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR;
+ case 4: return ADD_COLUMN_FAMILY_POST_OPERATION;
+ case 5: return ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<AddColumnFamilyState>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<AddColumnFamilyState>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<AddColumnFamilyState>() {
+ public AddColumnFamilyState findValueByNumber(int number) {
+ return AddColumnFamilyState.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.getDescriptor().getEnumTypes().get(3);
+ }
+
+ private static final AddColumnFamilyState[] VALUES = values();
+
+ public static AddColumnFamilyState valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private AddColumnFamilyState(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:AddColumnFamilyState)
+ }
+
+ /**
+ * Protobuf enum {@code ModifyColumnFamilyState}
+ */
+ public enum ModifyColumnFamilyState
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * MODIFY_COLUMN_FAMILY_PREPARE = 1;
+ */
+ MODIFY_COLUMN_FAMILY_PREPARE(0, 1),
+ /**
+ * MODIFY_COLUMN_FAMILY_PRE_OPERATION = 2;
+ */
+ MODIFY_COLUMN_FAMILY_PRE_OPERATION(1, 2),
+ /**
+ * MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3;
+ */
+ MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR(2, 3),
+ /**
+ * MODIFY_COLUMN_FAMILY_POST_OPERATION = 4;
+ */
+ MODIFY_COLUMN_FAMILY_POST_OPERATION(3, 4),
+ /**
+ * MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 5;
+ */
+ MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS(4, 5),
+ ;
+
+ /**
+ * MODIFY_COLUMN_FAMILY_PREPARE = 1;
+ */
+ public static final int MODIFY_COLUMN_FAMILY_PREPARE_VALUE = 1;
+ /**
+ * MODIFY_COLUMN_FAMILY_PRE_OPERATION = 2;
+ */
+ public static final int MODIFY_COLUMN_FAMILY_PRE_OPERATION_VALUE = 2;
+ /**
+ * MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3;
+ */
+ public static final int MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR_VALUE = 3;
+ /**
+ * MODIFY_COLUMN_FAMILY_POST_OPERATION = 4;
+ */
+ public static final int MODIFY_COLUMN_FAMILY_POST_OPERATION_VALUE = 4;
+ /**
+ * MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 5;
+ */
+ public static final int MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS_VALUE = 5;
+
+
+ public final int getNumber() { return value; }
+
+ public static ModifyColumnFamilyState valueOf(int value) {
+ switch (value) {
+ case 1: return MODIFY_COLUMN_FAMILY_PREPARE;
+ case 2: return MODIFY_COLUMN_FAMILY_PRE_OPERATION;
+ case 3: return MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR;
+ case 4: return MODIFY_COLUMN_FAMILY_POST_OPERATION;
+ case 5: return MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<ModifyColumnFamilyState>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<ModifyColumnFamilyState>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<ModifyColumnFamilyState>() {
+ public ModifyColumnFamilyState findValueByNumber(int number) {
+ return ModifyColumnFamilyState.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.getDescriptor().getEnumTypes().get(4);
+ }
+
+ private static final ModifyColumnFamilyState[] VALUES = values();
+
+ public static ModifyColumnFamilyState valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private ModifyColumnFamilyState(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:ModifyColumnFamilyState)
+ }
+
+ /**
+ * Protobuf enum {@code DeleteColumnFamilyState}
+ */
+ public enum DeleteColumnFamilyState
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * DELETE_COLUMN_FAMILY_PREPARE = 1;
+ */
+ DELETE_COLUMN_FAMILY_PREPARE(0, 1),
+ /**
+ * DELETE_COLUMN_FAMILY_PRE_OPERATION = 2;
+ */
+ DELETE_COLUMN_FAMILY_PRE_OPERATION(1, 2),
+ /**
+ * DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3;
+ */
+ DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR(2, 3),
+ /**
+ * DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT = 4;
+ */
+ DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT(3, 4),
+ /**
+ * DELETE_COLUMN_FAMILY_POST_OPERATION = 5;
+ */
+ DELETE_COLUMN_FAMILY_POST_OPERATION(4, 5),
+ /**
+ * DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 6;
+ */
+ DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS(5, 6),
+ ;
+
+ /**
+ * DELETE_COLUMN_FAMILY_PREPARE = 1;
+ */
+ public static final int DELETE_COLUMN_FAMILY_PREPARE_VALUE = 1;
+ /**
+ * DELETE_COLUMN_FAMILY_PRE_OPERATION = 2;
+ */
+ public static final int DELETE_COLUMN_FAMILY_PRE_OPERATION_VALUE = 2;
+ /**
+ * DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3;
+ */
+ public static final int DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR_VALUE = 3;
+ /**
+ * DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT = 4;
+ */
+ public static final int DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT_VALUE = 4;
+ /**
+ * DELETE_COLUMN_FAMILY_POST_OPERATION = 5;
+ */
+ public static final int DELETE_COLUMN_FAMILY_POST_OPERATION_VALUE = 5;
+ /**
+ * DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 6;
+ */
+ public static final int DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS_VALUE = 6;
+
+
+ public final int getNumber() { return value; }
+
+ public static DeleteColumnFamilyState valueOf(int value) {
+ switch (value) {
+ case 1: return DELETE_COLUMN_FAMILY_PREPARE;
+ case 2: return DELETE_COLUMN_FAMILY_PRE_OPERATION;
+ case 3: return DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR;
+ case 4: return DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT;
+ case 5: return DELETE_COLUMN_FAMILY_POST_OPERATION;
+ case 6: return DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<DeleteColumnFamilyState>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<DeleteColumnFamilyState>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<DeleteColumnFamilyState>() {
+ public DeleteColumnFamilyState findValueByNumber(int number) {
+ return DeleteColumnFamilyState.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.getDescriptor().getEnumTypes().get(5);
+ }
+
+ private static final DeleteColumnFamilyState[] VALUES = values();
+
+ public static DeleteColumnFamilyState valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private DeleteColumnFamilyState(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:DeleteColumnFamilyState)
+ }
+
public interface CreateTableStateDataOrBuilder
extends com.google.protobuf.MessageOrBuilder {
@@ -3776,6 +4112,3514 @@ public final class MasterProcedureProtos {
// @@protoc_insertion_point(class_scope:DeleteTableStateData)
}
+ public interface AddColumnFamilyStateDataOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .UserInformation user_info = 1;
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ boolean hasUserInfo();
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo();
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder();
+
+ // required .TableName table_name = 2;
+ /**
+ * required .TableName table_name = 2;
+ */
+ boolean hasTableName();
+ /**
+ * required .TableName table_name = 2;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ /**
+ * required .TableName table_name = 2;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+
+ // required .ColumnFamilySchema columnfamily_schema = 3;
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ boolean hasColumnfamilySchema();
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnfamilySchema();
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnfamilySchemaOrBuilder();
+
+ // optional .TableSchema unmodified_table_schema = 4;
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ boolean hasUnmodifiedTableSchema();
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema();
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder();
+ }
+ /**
+ * Protobuf type {@code AddColumnFamilyStateData}
+ */
+ public static final class AddColumnFamilyStateData extends
+ com.google.protobuf.GeneratedMessage
+ implements AddColumnFamilyStateDataOrBuilder {
+ // Use AddColumnFamilyStateData.newBuilder() to construct.
+ private AddColumnFamilyStateData(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private AddColumnFamilyStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final AddColumnFamilyStateData defaultInstance;
+ public static AddColumnFamilyStateData getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public AddColumnFamilyStateData getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private AddColumnFamilyStateData(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = userInfo_.toBuilder();
+ }
+ userInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(userInfo_);
+ userInfo_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ subBuilder = tableName_.toBuilder();
+ }
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(tableName_);
+ tableName_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000002;
+ break;
+ }
+ case 26: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ subBuilder = columnfamilySchema_.toBuilder();
+ }
+ columnfamilySchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(columnfamilySchema_);
+ columnfamilySchema_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000004;
+ break;
+ }
+ case 34: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ subBuilder = unmodifiedTableSchema_.toBuilder();
+ }
+ unmodifiedTableSchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(unmodifiedTableSchema_);
+ unmodifiedTableSchema_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000008;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_AddColumnFamilyStateData_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_AddColumnFamilyStateData_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData.class, org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<AddColumnFamilyStateData> PARSER =
+ new com.google.protobuf.AbstractParser<AddColumnFamilyStateData>() {
+ public AddColumnFamilyStateData parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new AddColumnFamilyStateData(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<AddColumnFamilyStateData> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required .UserInformation user_info = 1;
+ public static final int USER_INFO_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_;
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public boolean hasUserInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() {
+ return userInfo_;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() {
+ return userInfo_;
+ }
+
+ // required .TableName table_name = 2;
+ public static final int TABLE_NAME_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ /**
+ * required .TableName table_name = 2;
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ return tableName_;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ return tableName_;
+ }
+
+ // required .ColumnFamilySchema columnfamily_schema = 3;
+ public static final int COLUMNFAMILY_SCHEMA_FIELD_NUMBER = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnfamilySchema_;
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public boolean hasColumnfamilySchema() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnfamilySchema() {
+ return columnfamilySchema_;
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnfamilySchemaOrBuilder() {
+ return columnfamilySchema_;
+ }
+
+ // optional .TableSchema unmodified_table_schema = 4;
+ public static final int UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_;
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public boolean hasUnmodifiedTableSchema() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() {
+ return unmodifiedTableSchema_;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder() {
+ return unmodifiedTableSchema_;
+ }
+
+ private void initFields() {
+ userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ columnfamilySchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
+ unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasUserInfo()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasTableName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasColumnfamilySchema()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getUserInfo().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getTableName().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getColumnfamilySchema().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (hasUnmodifiedTableSchema()) {
+ if (!getUnmodifiedTableSchema().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, userInfo_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, tableName_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeMessage(3, columnfamilySchema_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeMessage(4, unmodifiedTableSchema_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, userInfo_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, tableName_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, columnfamilySchema_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(4, unmodifiedTableSchema_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData other = (org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData) obj;
+
+ boolean result = true;
+ result = result && (hasUserInfo() == other.hasUserInfo());
+ if (hasUserInfo()) {
+ result = result && getUserInfo()
+ .equals(other.getUserInfo());
+ }
+ result = result && (hasTableName() == other.hasTableName());
+ if (hasTableName()) {
+ result = result && getTableName()
+ .equals(other.getTableName());
+ }
+ result = result && (hasColumnfamilySchema() == other.hasColumnfamilySchema());
+ if (hasColumnfamilySchema()) {
+ result = result && getColumnfamilySchema()
+ .equals(other.getColumnfamilySchema());
+ }
+ result = result && (hasUnmodifiedTableSchema() == other.hasUnmodifiedTableSchema());
+ if (hasUnmodifiedTableSchema()) {
+ result = result && getUnmodifiedTableSchema()
+ .equals(other.getUnmodifiedTableSchema());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasUserInfo()) {
+ hash = (37 * hash) + USER_INFO_FIELD_NUMBER;
+ hash = (53 * hash) + getUserInfo().hashCode();
+ }
+ if (hasTableName()) {
+ hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getTableName().hashCode();
+ }
+ if (hasColumnfamilySchema()) {
+ hash = (37 * hash) + COLUMNFAMILY_SCHEMA_FIELD_NUMBER;
+ hash = (53 * hash) + getColumnfamilySchema().hashCode();
+ }
+ if (hasUnmodifiedTableSchema()) {
+ hash = (37 * hash) + UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER;
+ hash = (53 * hash) + getUnmodifiedTableSchema().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code AddColumnFamilyStateData}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateDataOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_AddColumnFamilyStateData_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_AddColumnFamilyStateData_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData.class, org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getUserInfoFieldBuilder();
+ getTableNameFieldBuilder();
+ getColumnfamilySchemaFieldBuilder();
+ getUnmodifiedTableSchemaFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (userInfoBuilder_ == null) {
+ userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ } else {
+ userInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (tableNameBuilder_ == null) {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (columnfamilySchemaBuilder_ == null) {
+ columnfamilySchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
+ } else {
+ columnfamilySchemaBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ } else {
+ unmodifiedTableSchemaBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_AddColumnFamilyStateData_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData build() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData result = new org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (userInfoBuilder_ == null) {
+ result.userInfo_ = userInfo_;
+ } else {
+ result.userInfo_ = userInfoBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ if (tableNameBuilder_ == null) {
+ result.tableName_ = tableName_;
+ } else {
+ result.tableName_ = tableNameBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ if (columnfamilySchemaBuilder_ == null) {
+ result.columnfamilySchema_ = columnfamilySchema_;
+ } else {
+ result.columnfamilySchema_ = columnfamilySchemaBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ result.unmodifiedTableSchema_ = unmodifiedTableSchema_;
+ } else {
+ result.unmodifiedTableSchema_ = unmodifiedTableSchemaBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData.getDefaultInstance()) return this;
+ if (other.hasUserInfo()) {
+ mergeUserInfo(other.getUserInfo());
+ }
+ if (other.hasTableName()) {
+ mergeTableName(other.getTableName());
+ }
+ if (other.hasColumnfamilySchema()) {
+ mergeColumnfamilySchema(other.getColumnfamilySchema());
+ }
+ if (other.hasUnmodifiedTableSchema()) {
+ mergeUnmodifiedTableSchema(other.getUnmodifiedTableSchema());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasUserInfo()) {
+
+ return false;
+ }
+ if (!hasTableName()) {
+
+ return false;
+ }
+ if (!hasColumnfamilySchema()) {
+
+ return false;
+ }
+ if (!getUserInfo().isInitialized()) {
+
+ return false;
+ }
+ if (!getTableName().isInitialized()) {
+
+ return false;
+ }
+ if (!getColumnfamilySchema().isInitialized()) {
+
+ return false;
+ }
+ if (hasUnmodifiedTableSchema()) {
+ if (!getUnmodifiedTableSchema().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required .UserInformation user_info = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_;
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public boolean hasUserInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() {
+ if (userInfoBuilder_ == null) {
+ return userInfo_;
+ } else {
+ return userInfoBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder setUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) {
+ if (userInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ userInfo_ = value;
+ onChanged();
+ } else {
+ userInfoBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder setUserInfo(
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder builderForValue) {
+ if (userInfoBuilder_ == null) {
+ userInfo_ = builderForValue.build();
+ onChanged();
+ } else {
+ userInfoBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder mergeUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) {
+ if (userInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ userInfo_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) {
+ userInfo_ =
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial();
+ } else {
+ userInfo_ = value;
+ }
+ onChanged();
+ } else {
+ userInfoBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder clearUserInfo() {
+ if (userInfoBuilder_ == null) {
+ userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ onChanged();
+ } else {
+ userInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder getUserInfoBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getUserInfoFieldBuilder().getBuilder();
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() {
+ if (userInfoBuilder_ != null) {
+ return userInfoBuilder_.getMessageOrBuilder();
+ } else {
+ return userInfo_;
+ }
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder>
+ getUserInfoFieldBuilder() {
+ if (userInfoBuilder_ == null) {
+ userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder>(
+ userInfo_,
+ getParentForChildren(),
+ isClean());
+ userInfo_ = null;
+ }
+ return userInfoBuilder_;
+ }
+
+ // required .TableName table_name = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ /**
+ * required .TableName table_name = 2;
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ if (tableNameBuilder_ == null) {
+ return tableName_;
+ } else {
+ return tableNameBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ tableName_ = value;
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder setTableName(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ tableName_ = builderForValue.build();
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ } else {
+ tableName_ = value;
+ }
+ onChanged();
+ } else {
+ tableNameBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder clearTableName() {
+ if (tableNameBuilder_ == null) {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ onChanged();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getTableNameFieldBuilder().getBuilder();
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ if (tableNameBuilder_ != null) {
+ return tableNameBuilder_.getMessageOrBuilder();
+ } else {
+ return tableName_;
+ }
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameFieldBuilder() {
+ if (tableNameBuilder_ == null) {
+ tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ tableName_,
+ getParentForChildren(),
+ isClean());
+ tableName_ = null;
+ }
+ return tableNameBuilder_;
+ }
+
+ // required .ColumnFamilySchema columnfamily_schema = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnfamilySchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnfamilySchemaBuilder_;
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public boolean hasColumnfamilySchema() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnfamilySchema() {
+ if (columnfamilySchemaBuilder_ == null) {
+ return columnfamilySchema_;
+ } else {
+ return columnfamilySchemaBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public Builder setColumnfamilySchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
+ if (columnfamilySchemaBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ columnfamilySchema_ = value;
+ onChanged();
+ } else {
+ columnfamilySchemaBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public Builder setColumnfamilySchema(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) {
+ if (columnfamilySchemaBuilder_ == null) {
+ columnfamilySchema_ = builderForValue.build();
+ onChanged();
+ } else {
+ columnfamilySchemaBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public Builder mergeColumnfamilySchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
+ if (columnfamilySchemaBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004) &&
+ columnfamilySchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) {
+ columnfamilySchema_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnfamilySchema_).mergeFrom(value).buildPartial();
+ } else {
+ columnfamilySchema_ = value;
+ }
+ onChanged();
+ } else {
+ columnfamilySchemaBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public Builder clearColumnfamilySchema() {
+ if (columnfamilySchemaBuilder_ == null) {
+ columnfamilySchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
+ onChanged();
+ } else {
+ columnfamilySchemaBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnfamilySchemaBuilder() {
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return getColumnfamilySchemaFieldBuilder().getBuilder();
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnfamilySchemaOrBuilder() {
+ if (columnfamilySchemaBuilder_ != null) {
+ return columnfamilySchemaBuilder_.getMessageOrBuilder();
+ } else {
+ return columnfamilySchema_;
+ }
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
+ getColumnfamilySchemaFieldBuilder() {
+ if (columnfamilySchemaBuilder_ == null) {
+ columnfamilySchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>(
+ columnfamilySchema_,
+ getParentForChildren(),
+ isClean());
+ columnfamilySchema_ = null;
+ }
+ return columnfamilySchemaBuilder_;
+ }
+
+ // optional .TableSchema unmodified_table_schema = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> unmodifiedTableSchemaBuilder_;
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public boolean hasUnmodifiedTableSchema() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ return unmodifiedTableSchema_;
+ } else {
+ return unmodifiedTableSchemaBuilder_.getMessage();
+ }
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder setUnmodifiedTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ unmodifiedTableSchema_ = value;
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder setUnmodifiedTableSchema(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchema_ = builderForValue.build();
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder mergeUnmodifiedTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ if (((bitField0_ & 0x00000008) == 0x00000008) &&
+ unmodifiedTableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) {
+ unmodifiedTableSchema_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(unmodifiedTableSchema_).mergeFrom(value).buildPartial();
+ } else {
+ unmodifiedTableSchema_ = value;
+ }
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder clearUnmodifiedTableSchema() {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getUnmodifiedTableSchemaBuilder() {
+ bitField0_ |= 0x00000008;
+ onChanged();
+ return getUnmodifiedTableSchemaFieldBuilder().getBuilder();
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder() {
+ if (unmodifiedTableSchemaBuilder_ != null) {
+ return unmodifiedTableSchemaBuilder_.getMessageOrBuilder();
+ } else {
+ return unmodifiedTableSchema_;
+ }
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
+ getUnmodifiedTableSchemaFieldBuilder() {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>(
+ unmodifiedTableSchema_,
+ getParentForChildren(),
+ isClean());
+ unmodifiedTableSchema_ = null;
+ }
+ return unmodifiedTableSchemaBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:AddColumnFamilyStateData)
+ }
+
+ static {
+ defaultInstance = new AddColumnFamilyStateData(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:AddColumnFamilyStateData)
+ }
+
+ public interface ModifyColumnFamilyStateDataOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .UserInformation user_info = 1;
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ boolean hasUserInfo();
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo();
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder();
+
+ // required .TableName table_name = 2;
+ /**
+ * required .TableName table_name = 2;
+ */
+ boolean hasTableName();
+ /**
+ * required .TableName table_name = 2;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ /**
+ * required .TableName table_name = 2;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+
+ // required .ColumnFamilySchema columnfamily_schema = 3;
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ boolean hasColumnfamilySchema();
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnfamilySchema();
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnfamilySchemaOrBuilder();
+
+ // optional .TableSchema unmodified_table_schema = 4;
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ boolean hasUnmodifiedTableSchema();
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema();
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder();
+ }
+ /**
+ * Protobuf type {@code ModifyColumnFamilyStateData}
+ */
+ public static final class ModifyColumnFamilyStateData extends
+ com.google.protobuf.GeneratedMessage
+ implements ModifyColumnFamilyStateDataOrBuilder {
+ // Use ModifyColumnFamilyStateData.newBuilder() to construct.
+ private ModifyColumnFamilyStateData(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private ModifyColumnFamilyStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final ModifyColumnFamilyStateData defaultInstance;
+ public static ModifyColumnFamilyStateData getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public ModifyColumnFamilyStateData getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private ModifyColumnFamilyStateData(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = userInfo_.toBuilder();
+ }
+ userInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(userInfo_);
+ userInfo_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ subBuilder = tableName_.toBuilder();
+ }
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(tableName_);
+ tableName_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000002;
+ break;
+ }
+ case 26: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ subBuilder = columnfamilySchema_.toBuilder();
+ }
+ columnfamilySchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(columnfamilySchema_);
+ columnfamilySchema_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000004;
+ break;
+ }
+ case 34: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ subBuilder = unmodifiedTableSchema_.toBuilder();
+ }
+ unmodifiedTableSchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(unmodifiedTableSchema_);
+ unmodifiedTableSchema_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000008;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_ModifyColumnFamilyStateData_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_ModifyColumnFamilyStateData_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData.class, org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<ModifyColumnFamilyStateData> PARSER =
+ new com.google.protobuf.AbstractParser<ModifyColumnFamilyStateData>() {
+ public ModifyColumnFamilyStateData parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new ModifyColumnFamilyStateData(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<ModifyColumnFamilyStateData> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required .UserInformation user_info = 1;
+ public static final int USER_INFO_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_;
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public boolean hasUserInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() {
+ return userInfo_;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() {
+ return userInfo_;
+ }
+
+ // required .TableName table_name = 2;
+ public static final int TABLE_NAME_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ /**
+ * required .TableName table_name = 2;
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ return tableName_;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ return tableName_;
+ }
+
+ // required .ColumnFamilySchema columnfamily_schema = 3;
+ public static final int COLUMNFAMILY_SCHEMA_FIELD_NUMBER = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnfamilySchema_;
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public boolean hasColumnfamilySchema() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnfamilySchema() {
+ return columnfamilySchema_;
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnfamilySchemaOrBuilder() {
+ return columnfamilySchema_;
+ }
+
+ // optional .TableSchema unmodified_table_schema = 4;
+ public static final int UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_;
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public boolean hasUnmodifiedTableSchema() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() {
+ return unmodifiedTableSchema_;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder() {
+ return unmodifiedTableSchema_;
+ }
+
+ private void initFields() {
+ userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ columnfamilySchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
+ unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasUserInfo()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasTableName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasColumnfamilySchema()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getUserInfo().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getTableName().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getColumnfamilySchema().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (hasUnmodifiedTableSchema()) {
+ if (!getUnmodifiedTableSchema().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, userInfo_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, tableName_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeMessage(3, columnfamilySchema_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeMessage(4, unmodifiedTableSchema_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, userInfo_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, tableName_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, columnfamilySchema_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(4, unmodifiedTableSchema_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData other = (org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData) obj;
+
+ boolean result = true;
+ result = result && (hasUserInfo() == other.hasUserInfo());
+ if (hasUserInfo()) {
+ result = result && getUserInfo()
+ .equals(other.getUserInfo());
+ }
+ result = result && (hasTableName() == other.hasTableName());
+ if (hasTableName()) {
+ result = result && getTableName()
+ .equals(other.getTableName());
+ }
+ result = result && (hasColumnfamilySchema() == other.hasColumnfamilySchema());
+ if (hasColumnfamilySchema()) {
+ result = result && getColumnfamilySchema()
+ .equals(other.getColumnfamilySchema());
+ }
+ result = result && (hasUnmodifiedTableSchema() == other.hasUnmodifiedTableSchema());
+ if (hasUnmodifiedTableSchema()) {
+ result = result && getUnmodifiedTableSchema()
+ .equals(other.getUnmodifiedTableSchema());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasUserInfo()) {
+ hash = (37 * hash) + USER_INFO_FIELD_NUMBER;
+ hash = (53 * hash) + getUserInfo().hashCode();
+ }
+ if (hasTableName()) {
+ hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getTableName().hashCode();
+ }
+ if (hasColumnfamilySchema()) {
+ hash = (37 * hash) + COLUMNFAMILY_SCHEMA_FIELD_NUMBER;
+ hash = (53 * hash) + getColumnfamilySchema().hashCode();
+ }
+ if (hasUnmodifiedTableSchema()) {
+ hash = (37 * hash) + UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER;
+ hash = (53 * hash) + getUnmodifiedTableSchema().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code ModifyColumnFamilyStateData}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateDataOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_ModifyColumnFamilyStateData_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_ModifyColumnFamilyStateData_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData.class, org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getUserInfoFieldBuilder();
+ getTableNameFieldBuilder();
+ getColumnfamilySchemaFieldBuilder();
+ getUnmodifiedTableSchemaFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (userInfoBuilder_ == null) {
+ userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ } else {
+ userInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (tableNameBuilder_ == null) {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (columnfamilySchemaBuilder_ == null) {
+ columnfamilySchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
+ } else {
+ columnfamilySchemaBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ } else {
+ unmodifiedTableSchemaBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_ModifyColumnFamilyStateData_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData build() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData result = new org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (userInfoBuilder_ == null) {
+ result.userInfo_ = userInfo_;
+ } else {
+ result.userInfo_ = userInfoBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ if (tableNameBuilder_ == null) {
+ result.tableName_ = tableName_;
+ } else {
+ result.tableName_ = tableNameBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ if (columnfamilySchemaBuilder_ == null) {
+ result.columnfamilySchema_ = columnfamilySchema_;
+ } else {
+ result.columnfamilySchema_ = columnfamilySchemaBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ result.unmodifiedTableSchema_ = unmodifiedTableSchema_;
+ } else {
+ result.unmodifiedTableSchema_ = unmodifiedTableSchemaBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData.getDefaultInstance()) return this;
+ if (other.hasUserInfo()) {
+ mergeUserInfo(other.getUserInfo());
+ }
+ if (other.hasTableName()) {
+ mergeTableName(other.getTableName());
+ }
+ if (other.hasColumnfamilySchema()) {
+ mergeColumnfamilySchema(other.getColumnfamilySchema());
+ }
+ if (other.hasUnmodifiedTableSchema()) {
+ mergeUnmodifiedTableSchema(other.getUnmodifiedTableSchema());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasUserInfo()) {
+
+ return false;
+ }
+ if (!hasTableName()) {
+
+ return false;
+ }
+ if (!hasColumnfamilySchema()) {
+
+ return false;
+ }
+ if (!getUserInfo().isInitialized()) {
+
+ return false;
+ }
+ if (!getTableName().isInitialized()) {
+
+ return false;
+ }
+ if (!getColumnfamilySchema().isInitialized()) {
+
+ return false;
+ }
+ if (hasUnmodifiedTableSchema()) {
+ if (!getUnmodifiedTableSchema().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyStateData) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required .UserInformation user_info = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_;
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public boolean hasUserInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() {
+ if (userInfoBuilder_ == null) {
+ return userInfo_;
+ } else {
+ return userInfoBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder setUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) {
+ if (userInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ userInfo_ = value;
+ onChanged();
+ } else {
+ userInfoBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder setUserInfo(
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder builderForValue) {
+ if (userInfoBuilder_ == null) {
+ userInfo_ = builderForValue.build();
+ onChanged();
+ } else {
+ userInfoBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder mergeUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) {
+ if (userInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ userInfo_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) {
+ userInfo_ =
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial();
+ } else {
+ userInfo_ = value;
+ }
+ onChanged();
+ } else {
+ userInfoBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder clearUserInfo() {
+ if (userInfoBuilder_ == null) {
+ userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ onChanged();
+ } else {
+ userInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder getUserInfoBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getUserInfoFieldBuilder().getBuilder();
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() {
+ if (userInfoBuilder_ != null) {
+ return userInfoBuilder_.getMessageOrBuilder();
+ } else {
+ return userInfo_;
+ }
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder>
+ getUserInfoFieldBuilder() {
+ if (userInfoBuilder_ == null) {
+ userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder>(
+ userInfo_,
+ getParentForChildren(),
+ isClean());
+ userInfo_ = null;
+ }
+ return userInfoBuilder_;
+ }
+
+ // required .TableName table_name = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ /**
+ * required .TableName table_name = 2;
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ if (tableNameBuilder_ == null) {
+ return tableName_;
+ } else {
+ return tableNameBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ tableName_ = value;
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder setTableName(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ tableName_ = builderForValue.build();
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ } else {
+ tableName_ = value;
+ }
+ onChanged();
+ } else {
+ tableNameBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder clearTableName() {
+ if (tableNameBuilder_ == null) {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ onChanged();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getTableNameFieldBuilder().getBuilder();
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ if (tableNameBuilder_ != null) {
+ return tableNameBuilder_.getMessageOrBuilder();
+ } else {
+ return tableName_;
+ }
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameFieldBuilder() {
+ if (tableNameBuilder_ == null) {
+ tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ tableName_,
+ getParentForChildren(),
+ isClean());
+ tableName_ = null;
+ }
+ return tableNameBuilder_;
+ }
+
+ // required .ColumnFamilySchema columnfamily_schema = 3;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema columnfamilySchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnfamilySchemaBuilder_;
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public boolean hasColumnfamilySchema() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema getColumnfamilySchema() {
+ if (columnfamilySchemaBuilder_ == null) {
+ return columnfamilySchema_;
+ } else {
+ return columnfamilySchemaBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public Builder setColumnfamilySchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
+ if (columnfamilySchemaBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ columnfamilySchema_ = value;
+ onChanged();
+ } else {
+ columnfamilySchemaBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public Builder setColumnfamilySchema(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder builderForValue) {
+ if (columnfamilySchemaBuilder_ == null) {
+ columnfamilySchema_ = builderForValue.build();
+ onChanged();
+ } else {
+ columnfamilySchemaBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public Builder mergeColumnfamilySchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema value) {
+ if (columnfamilySchemaBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004) &&
+ columnfamilySchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance()) {
+ columnfamilySchema_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.newBuilder(columnfamilySchema_).mergeFrom(value).buildPartial();
+ } else {
+ columnfamilySchema_ = value;
+ }
+ onChanged();
+ } else {
+ columnfamilySchemaBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public Builder clearColumnfamilySchema() {
+ if (columnfamilySchemaBuilder_ == null) {
+ columnfamilySchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.getDefaultInstance();
+ onChanged();
+ } else {
+ columnfamilySchemaBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder getColumnfamilySchemaBuilder() {
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return getColumnfamilySchemaFieldBuilder().getBuilder();
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnfamilySchemaOrBuilder() {
+ if (columnfamilySchemaBuilder_ != null) {
+ return columnfamilySchemaBuilder_.getMessageOrBuilder();
+ } else {
+ return columnfamilySchema_;
+ }
+ }
+ /**
+ * required .ColumnFamilySchema columnfamily_schema = 3;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
+ getColumnfamilySchemaFieldBuilder() {
+ if (columnfamilySchemaBuilder_ == null) {
+ columnfamilySchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>(
+ columnfamilySchema_,
+ getParentForChildren(),
+ isClean());
+ columnfamilySchema_ = null;
+ }
+ return columnfamilySchemaBuilder_;
+ }
+
+ // optional .TableSchema unmodified_table_schema = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> unmodifiedTableSchemaBuilder_;
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public boolean hasUnmodifiedTableSchema() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ return unmodifiedTableSchema_;
+ } else {
+ return unmodifiedTableSchemaBuilder_.getMessage();
+ }
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder setUnmodifiedTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ unmodifiedTableSchema_ = value;
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder setUnmodifiedTableSchema(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchema_ = builderForValue.build();
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder mergeUnmodifiedTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ if (((bitField0_ & 0x00000008) == 0x00000008) &&
+ unmodifiedTableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) {
+ unmodifiedTableSchema_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(unmodifiedTableSchema_).mergeFrom(value).buildPartial();
+ } else {
+ unmodifiedTableSchema_ = value;
+ }
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder clearUnmodifiedTableSchema() {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getUnmodifiedTableSchemaBuilder() {
+ bitField0_ |= 0x00000008;
+ onChanged();
+ return getUnmodifiedTableSchemaFieldBuilder().getBuilder();
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder() {
+ if (unmodifiedTableSchemaBuilder_ != null) {
+ return unmodifiedTableSchemaBuilder_.getMessageOrBuilder();
+ } else {
+ return unmodifiedTableSchema_;
+ }
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
+ getUnmodifiedTableSchemaFieldBuilder() {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>(
+ unmodifiedTableSchema_,
+ getParentForChildren(),
+ isClean());
+ unmodifiedTableSchema_ = null;
+ }
+ return unmodifiedTableSchemaBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:ModifyColumnFamilyStateData)
+ }
+
+ static {
+ defaultInstance = new ModifyColumnFamilyStateData(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:ModifyColumnFamilyStateData)
+ }
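+
+ // Usage sketch (illustrative only, not part of the generated output): building,
+ // serializing, and re-parsing a ModifyColumnFamilyStateData. The user, table,
+ // namespace, and family names below are hypothetical placeholders.
+ //
+ //   ModifyColumnFamilyStateData data = ModifyColumnFamilyStateData.newBuilder()
+ //       .setUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos
+ //           .UserInformation.newBuilder().setEffectiveUser("hbase").build())
+ //       .setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos
+ //           .TableName.newBuilder()
+ //           .setNamespace(com.google.protobuf.ByteString.copyFromUtf8("default"))
+ //           .setQualifier(com.google.protobuf.ByteString.copyFromUtf8("t1"))
+ //           .build())
+ //       .setColumnfamilySchema(org.apache.hadoop.hbase.protobuf.generated
+ //           .HBaseProtos.ColumnFamilySchema.newBuilder()
+ //           .setName(com.google.protobuf.ByteString.copyFromUtf8("cf"))
+ //           .build())
+ //       .build();  // build() throws if any required field is unset
+ //   byte[] bytes = data.toByteArray();
+ //   ModifyColumnFamilyStateData parsed =
+ //       ModifyColumnFamilyStateData.parseFrom(bytes);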
+
+ public interface DeleteColumnFamilyStateDataOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .UserInformation user_info = 1;
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ boolean hasUserInfo();
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo();
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder();
+
+ // required .TableName table_name = 2;
+ /**
+ * required .TableName table_name = 2;
+ */
+ boolean hasTableName();
+ /**
+ * required .TableName table_name = 2;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
+ /**
+ * required .TableName table_name = 2;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
+
+ // required bytes columnfamily_name = 3;
+ /**
+ * required bytes columnfamily_name = 3;
+ */
+ boolean hasColumnfamilyName();
+ /**
+ * required bytes columnfamily_name = 3;
+ */
+ com.google.protobuf.ByteString getColumnfamilyName();
+
+ // optional .TableSchema unmodified_table_schema = 4;
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ boolean hasUnmodifiedTableSchema();
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema();
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder();
+ }
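+
+ // As above, a sketch of the corresponding .proto message, reconstructed from
+ // the field comments (the .proto source remains authoritative). Note that
+ // field 3 is a raw bytes family name here, not a full ColumnFamilySchema:
+ //
+ //   message DeleteColumnFamilyStateData {
+ //     required UserInformation user_info = 1;
+ //     required TableName table_name = 2;
+ //     required bytes columnfamily_name = 3;
+ //     optional TableSchema unmodified_table_schema = 4;
+ //   }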
+ /**
+ * Protobuf type {@code DeleteColumnFamilyStateData}
+ */
+ public static final class DeleteColumnFamilyStateData extends
+ com.google.protobuf.GeneratedMessage
+ implements DeleteColumnFamilyStateDataOrBuilder {
+ // Use DeleteColumnFamilyStateData.newBuilder() to construct.
+ private DeleteColumnFamilyStateData(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private DeleteColumnFamilyStateData(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final DeleteColumnFamilyStateData defaultInstance;
+ public static DeleteColumnFamilyStateData getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public DeleteColumnFamilyStateData getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private DeleteColumnFamilyStateData(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = userInfo_.toBuilder();
+ }
+ userInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(userInfo_);
+ userInfo_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ case 18: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ subBuilder = tableName_.toBuilder();
+ }
+ tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(tableName_);
+ tableName_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000002;
+ break;
+ }
+ case 26: {
+ bitField0_ |= 0x00000004;
+ columnfamilyName_ = input.readBytes();
+ break;
+ }
+ case 34: {
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ subBuilder = unmodifiedTableSchema_.toBuilder();
+ }
+ unmodifiedTableSchema_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(unmodifiedTableSchema_);
+ unmodifiedTableSchema_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000008;
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_DeleteColumnFamilyStateData_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_DeleteColumnFamilyStateData_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData.class, org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<DeleteColumnFamilyStateData> PARSER =
+ new com.google.protobuf.AbstractParser<DeleteColumnFamilyStateData>() {
+ public DeleteColumnFamilyStateData parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new DeleteColumnFamilyStateData(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<DeleteColumnFamilyStateData> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required .UserInformation user_info = 1;
+ public static final int USER_INFO_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_;
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public boolean hasUserInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() {
+ return userInfo_;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() {
+ return userInfo_;
+ }
+
+ // required .TableName table_name = 2;
+ public static final int TABLE_NAME_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
+ /**
+ * required .TableName table_name = 2;
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ return tableName_;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ return tableName_;
+ }
+
+ // required bytes columnfamily_name = 3;
+ public static final int COLUMNFAMILY_NAME_FIELD_NUMBER = 3;
+ private com.google.protobuf.ByteString columnfamilyName_;
+ /**
+ * required bytes columnfamily_name = 3;
+ */
+ public boolean hasColumnfamilyName() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * required bytes columnfamily_name = 3;
+ */
+ public com.google.protobuf.ByteString getColumnfamilyName() {
+ return columnfamilyName_;
+ }
+
+ // optional .TableSchema unmodified_table_schema = 4;
+ public static final int UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_;
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public boolean hasUnmodifiedTableSchema() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() {
+ return unmodifiedTableSchema_;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder() {
+ return unmodifiedTableSchema_;
+ }
+
+ private void initFields() {
+ userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ columnfamilyName_ = com.google.protobuf.ByteString.EMPTY;
+ unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasUserInfo()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasTableName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasColumnfamilyName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getUserInfo().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getTableName().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (hasUnmodifiedTableSchema()) {
+ if (!getUnmodifiedTableSchema().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, userInfo_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeMessage(2, tableName_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeBytes(3, columnfamilyName_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeMessage(4, unmodifiedTableSchema_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, userInfo_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(2, tableName_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(3, columnfamilyName_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(4, unmodifiedTableSchema_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData other = (org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData) obj;
+
+ boolean result = true;
+ result = result && (hasUserInfo() == other.hasUserInfo());
+ if (hasUserInfo()) {
+ result = result && getUserInfo()
+ .equals(other.getUserInfo());
+ }
+ result = result && (hasTableName() == other.hasTableName());
+ if (hasTableName()) {
+ result = result && getTableName()
+ .equals(other.getTableName());
+ }
+ result = result && (hasColumnfamilyName() == other.hasColumnfamilyName());
+ if (hasColumnfamilyName()) {
+ result = result && getColumnfamilyName()
+ .equals(other.getColumnfamilyName());
+ }
+ result = result && (hasUnmodifiedTableSchema() == other.hasUnmodifiedTableSchema());
+ if (hasUnmodifiedTableSchema()) {
+ result = result && getUnmodifiedTableSchema()
+ .equals(other.getUnmodifiedTableSchema());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasUserInfo()) {
+ hash = (37 * hash) + USER_INFO_FIELD_NUMBER;
+ hash = (53 * hash) + getUserInfo().hashCode();
+ }
+ if (hasTableName()) {
+ hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getTableName().hashCode();
+ }
+ if (hasColumnfamilyName()) {
+ hash = (37 * hash) + COLUMNFAMILY_NAME_FIELD_NUMBER;
+ hash = (53 * hash) + getColumnfamilyName().hashCode();
+ }
+ if (hasUnmodifiedTableSchema()) {
+ hash = (37 * hash) + UNMODIFIED_TABLE_SCHEMA_FIELD_NUMBER;
+ hash = (53 * hash) + getUnmodifiedTableSchema().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code DeleteColumnFamilyStateData}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateDataOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_DeleteColumnFamilyStateData_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_DeleteColumnFamilyStateData_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData.class, org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getUserInfoFieldBuilder();
+ getTableNameFieldBuilder();
+ getUnmodifiedTableSchemaFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (userInfoBuilder_ == null) {
+ userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ } else {
+ userInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ if (tableNameBuilder_ == null) {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ columnfamilyName_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ } else {
+ unmodifiedTableSchemaBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.internal_static_DeleteColumnFamilyStateData_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData build() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData buildPartial() {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData result = new org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (userInfoBuilder_ == null) {
+ result.userInfo_ = userInfo_;
+ } else {
+ result.userInfo_ = userInfoBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ if (tableNameBuilder_ == null) {
+ result.tableName_ = tableName_;
+ } else {
+ result.tableName_ = tableNameBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.columnfamilyName_ = columnfamilyName_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ result.unmodifiedTableSchema_ = unmodifiedTableSchema_;
+ } else {
+ result.unmodifiedTableSchema_ = unmodifiedTableSchemaBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData) {
+ return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData other) {
+ if (other == org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData.getDefaultInstance()) return this;
+ if (other.hasUserInfo()) {
+ mergeUserInfo(other.getUserInfo());
+ }
+ if (other.hasTableName()) {
+ mergeTableName(other.getTableName());
+ }
+ if (other.hasColumnfamilyName()) {
+ setColumnfamilyName(other.getColumnfamilyName());
+ }
+ if (other.hasUnmodifiedTableSchema()) {
+ mergeUnmodifiedTableSchema(other.getUnmodifiedTableSchema());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasUserInfo()) {
+
+ return false;
+ }
+ if (!hasTableName()) {
+
+ return false;
+ }
+ if (!hasColumnfamilyName()) {
+
+ return false;
+ }
+ if (!getUserInfo().isInitialized()) {
+
+ return false;
+ }
+ if (!getTableName().isInitialized()) {
+
+ return false;
+ }
+ if (hasUnmodifiedTableSchema()) {
+ if (!getUnmodifiedTableSchema().isInitialized()) {
+
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required .UserInformation user_info = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder> userInfoBuilder_;
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public boolean hasUserInfo() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation getUserInfo() {
+ if (userInfoBuilder_ == null) {
+ return userInfo_;
+ } else {
+ return userInfoBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder setUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) {
+ if (userInfoBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ userInfo_ = value;
+ onChanged();
+ } else {
+ userInfoBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder setUserInfo(
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder builderForValue) {
+ if (userInfoBuilder_ == null) {
+ userInfo_ = builderForValue.build();
+ onChanged();
+ } else {
+ userInfoBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder mergeUserInfo(org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation value) {
+ if (userInfoBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ userInfo_ != org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance()) {
+ userInfo_ =
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.newBuilder(userInfo_).mergeFrom(value).buildPartial();
+ } else {
+ userInfo_ = value;
+ }
+ onChanged();
+ } else {
+ userInfoBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public Builder clearUserInfo() {
+ if (userInfoBuilder_ == null) {
+ userInfo_ = org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.getDefaultInstance();
+ onChanged();
+ } else {
+ userInfoBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder getUserInfoBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getUserInfoFieldBuilder().getBuilder();
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder getUserInfoOrBuilder() {
+ if (userInfoBuilder_ != null) {
+ return userInfoBuilder_.getMessageOrBuilder();
+ } else {
+ return userInfo_;
+ }
+ }
+ /**
+ * required .UserInformation user_info = 1;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder>
+ getUserInfoFieldBuilder() {
+ if (userInfoBuilder_ == null) {
+ userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation.Builder, org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformationOrBuilder>(
+ userInfo_,
+ getParentForChildren(),
+ isClean());
+ userInfo_ = null;
+ }
+ return userInfoBuilder_;
+ }
+
+ // required .TableName table_name = 2;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
+ /**
+ * required .TableName table_name = 2;
+ */
+ public boolean hasTableName() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
+ if (tableNameBuilder_ == null) {
+ return tableName_;
+ } else {
+ return tableNameBuilder_.getMessage();
+ }
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ tableName_ = value;
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder setTableName(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
+ if (tableNameBuilder_ == null) {
+ tableName_ = builderForValue.build();
+ onChanged();
+ } else {
+ tableNameBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
+ if (tableNameBuilder_ == null) {
+ if (((bitField0_ & 0x00000002) == 0x00000002) &&
+ tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
+ tableName_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
+ } else {
+ tableName_ = value;
+ }
+ onChanged();
+ } else {
+ tableNameBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000002;
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public Builder clearTableName() {
+ if (tableNameBuilder_ == null) {
+ tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+ onChanged();
+ } else {
+ tableNameBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
+ bitField0_ |= 0x00000002;
+ onChanged();
+ return getTableNameFieldBuilder().getBuilder();
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
+ if (tableNameBuilder_ != null) {
+ return tableNameBuilder_.getMessageOrBuilder();
+ } else {
+ return tableName_;
+ }
+ }
+ /**
+ * required .TableName table_name = 2;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
+ getTableNameFieldBuilder() {
+ if (tableNameBuilder_ == null) {
+ tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
+ tableName_,
+ getParentForChildren(),
+ isClean());
+ tableName_ = null;
+ }
+ return tableNameBuilder_;
+ }
+
+ // required bytes columnfamily_name = 3;
+ private com.google.protobuf.ByteString columnfamilyName_ = com.google.protobuf.ByteString.EMPTY;
+ /**
+ * required bytes columnfamily_name = 3;
+ */
+ public boolean hasColumnfamilyName() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * required bytes columnfamily_name = 3;
+ */
+ public com.google.protobuf.ByteString getColumnfamilyName() {
+ return columnfamilyName_;
+ }
+ /**
+ * required bytes columnfamily_name = 3;
+ */
+ public Builder setColumnfamilyName(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000004;
+ columnfamilyName_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * required bytes columnfamily_name = 3;
+ */
+ public Builder clearColumnfamilyName() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ columnfamilyName_ = getDefaultInstance().getColumnfamilyName();
+ onChanged();
+ return this;
+ }
+
+ // optional .TableSchema unmodified_table_schema = 4;
+ private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder> unmodifiedTableSchemaBuilder_;
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public boolean hasUnmodifiedTableSchema() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema getUnmodifiedTableSchema() {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ return unmodifiedTableSchema_;
+ } else {
+ return unmodifiedTableSchemaBuilder_.getMessage();
+ }
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder setUnmodifiedTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ unmodifiedTableSchema_ = value;
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder setUnmodifiedTableSchema(
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder builderForValue) {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchema_ = builderForValue.build();
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder mergeUnmodifiedTableSchema(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema value) {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ if (((bitField0_ & 0x00000008) == 0x00000008) &&
+ unmodifiedTableSchema_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance()) {
+ unmodifiedTableSchema_ =
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.newBuilder(unmodifiedTableSchema_).mergeFrom(value).buildPartial();
+ } else {
+ unmodifiedTableSchema_ = value;
+ }
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000008;
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public Builder clearUnmodifiedTableSchema() {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchema_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.getDefaultInstance();
+ onChanged();
+ } else {
+ unmodifiedTableSchemaBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder getUnmodifiedTableSchemaBuilder() {
+ bitField0_ |= 0x00000008;
+ onChanged();
+ return getUnmodifiedTableSchemaFieldBuilder().getBuilder();
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder getUnmodifiedTableSchemaOrBuilder() {
+ if (unmodifiedTableSchemaBuilder_ != null) {
+ return unmodifiedTableSchemaBuilder_.getMessageOrBuilder();
+ } else {
+ return unmodifiedTableSchema_;
+ }
+ }
+ /**
+ * optional .TableSchema unmodified_table_schema = 4;
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>
+ getUnmodifiedTableSchemaFieldBuilder() {
+ if (unmodifiedTableSchemaBuilder_ == null) {
+ unmodifiedTableSchemaBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchemaOrBuilder>(
+ unmodifiedTableSchema_,
+ getParentForChildren(),
+ isClean());
+ unmodifiedTableSchema_ = null;
+ }
+ return unmodifiedTableSchemaBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:DeleteColumnFamilyStateData)
+ }
+
+ static {
+ defaultInstance = new DeleteColumnFamilyStateData(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:DeleteColumnFamilyStateData)
+ }
+
private static com.google.protobuf.Descriptors.Descriptor
internal_static_CreateTableStateData_descriptor;
private static
@@ -3791,6 +7635,21 @@ public final class MasterProcedureProtos {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_DeleteTableStateData_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_AddColumnFamilyStateData_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_AddColumnFamilyStateData_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_ModifyColumnFamilyStateData_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_ModifyColumnFamilyStateData_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_DeleteColumnFamilyStateData_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_DeleteColumnFamilyStateData_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
@@ -3812,27 +7671,59 @@ public final class MasterProcedureProtos {
"_modify\030\004 \002(\010\"}\n\024DeleteTableStateData\022#\n",
"\tuser_info\030\001 \002(\0132\020.UserInformation\022\036\n\nta" +
"ble_name\030\002 \002(\0132\n.TableName\022 \n\013region_inf" +
- "o\030\003 \003(\0132\013.RegionInfo*\330\001\n\020CreateTableStat" +
- "e\022\036\n\032CREATE_TABLE_PRE_OPERATION\020\001\022 \n\034CRE" +
- "ATE_TABLE_WRITE_FS_LAYOUT\020\002\022\034\n\030CREATE_TA" +
- "BLE_ADD_TO_META\020\003\022\037\n\033CREATE_TABLE_ASSIGN" +
- "_REGIONS\020\004\022\"\n\036CREATE_TABLE_UPDATE_DESC_C" +
- "ACHE\020\005\022\037\n\033CREATE_TABLE_POST_OPERATION\020\006*" +
- "\207\002\n\020ModifyTableState\022\030\n\024MODIFY_TABLE_PRE" +
- "PARE\020\001\022\036\n\032MODIFY_TABLE_PRE_OPERATION\020\002\022(",
- "\n$MODIFY_TABLE_UPDATE_TABLE_DESCRIPTOR\020\003" +
- "\022&\n\"MODIFY_TABLE_REMOVE_REPLICA_COLUMN\020\004" +
- "\022!\n\035MODIFY_TABLE_DELETE_FS_LAYOUT\020\005\022\037\n\033M" +
- "ODIFY_TABLE_POST_OPERATION\020\006\022#\n\037MODIFY_T" +
- "ABLE_REOPEN_ALL_REGIONS\020\007*\337\001\n\020DeleteTabl" +
- "eState\022\036\n\032DELETE_TABLE_PRE_OPERATION\020\001\022!" +
- "\n\035DELETE_TABLE_REMOVE_FROM_META\020\002\022 \n\034DEL" +
- "ETE_TABLE_CLEAR_FS_LAYOUT\020\003\022\"\n\036DELETE_TA" +
- "BLE_UPDATE_DESC_CACHE\020\004\022!\n\035DELETE_TABLE_" +
- "UNASSIGN_REGIONS\020\005\022\037\n\033DELETE_TABLE_POST_",
- "OPERATION\020\006BK\n*org.apache.hadoop.hbase.p" +
- "rotobuf.generatedB\025MasterProcedureProtos" +
- "H\001\210\001\001\240\001\001"
+ "o\030\003 \003(\0132\013.RegionInfo\"\300\001\n\030AddColumnFamily" +
+ "StateData\022#\n\tuser_info\030\001 \002(\0132\020.UserInfor" +
+ "mation\022\036\n\ntable_name\030\002 \002(\0132\n.TableName\0220" +
+ "\n\023columnfamily_schema\030\003 \002(\0132\023.ColumnFami" +
+ "lySchema\022-\n\027unmodified_table_schema\030\004 \001(" +
+ "\0132\014.TableSchema\"\303\001\n\033ModifyColumnFamilySt" +
+ "ateData\022#\n\tuser_info\030\001 \002(\0132\020.UserInforma" +
+ "tion\022\036\n\ntable_name\030\002 \002(\0132\n.TableName\0220\n\023",
+ "columnfamily_schema\030\003 \002(\0132\023.ColumnFamily" +
+ "Schema\022-\n\027unmodified_table_schema\030\004 \001(\0132" +
+ "\014.TableSchema\"\254\001\n\033DeleteColumnFamilyStat" +
+ "eData\022#\n\tuser_info\030\001 \002(\0132\020.UserInformati" +
+ "on\022\036\n\ntable_name\030\002 \002(\0132\n.TableName\022\031\n\021co" +
+ "lumnfamily_name\030\003 \002(\014\022-\n\027unmodified_tabl" +
+ "e_schema\030\004 \001(\0132\014.TableSchema*\330\001\n\020CreateT" +
+ "ableState\022\036\n\032CREATE_TABLE_PRE_OPERATION\020" +
+ "\001\022 \n\034CREATE_TABLE_WRITE_FS_LAYOUT\020\002\022\034\n\030C" +
+ "REATE_TABLE_ADD_TO_META\020\003\022\037\n\033CREATE_TABL",
+ "E_ASSIGN_REGIONS\020\004\022\"\n\036CREATE_TABLE_UPDAT" +
+ "E_DESC_CACHE\020\005\022\037\n\033CREATE_TABLE_POST_OPER" +
+ "ATION\020\006*\207\002\n\020ModifyTableState\022\030\n\024MODIFY_T" +
+ "ABLE_PREPARE\020\001\022\036\n\032MODIFY_TABLE_PRE_OPERA" +
+ "TION\020\002\022(\n$MODIFY_TABLE_UPDATE_TABLE_DESC" +
+ "RIPTOR\020\003\022&\n\"MODIFY_TABLE_REMOVE_REPLICA_" +
+ "COLUMN\020\004\022!\n\035MODIFY_TABLE_DELETE_FS_LAYOU" +
+ "T\020\005\022\037\n\033MODIFY_TABLE_POST_OPERATION\020\006\022#\n\037" +
+ "MODIFY_TABLE_REOPEN_ALL_REGIONS\020\007*\337\001\n\020De" +
+ "leteTableState\022\036\n\032DELETE_TABLE_PRE_OPERA",
+ "TION\020\001\022!\n\035DELETE_TABLE_REMOVE_FROM_META\020" +
+ "\002\022 \n\034DELETE_TABLE_CLEAR_FS_LAYOUT\020\003\022\"\n\036D" +
+ "ELETE_TABLE_UPDATE_DESC_CACHE\020\004\022!\n\035DELET" +
+ "E_TABLE_UNASSIGN_REGIONS\020\005\022\037\n\033DELETE_TAB" +
+ "LE_POST_OPERATION\020\006*\331\001\n\024AddColumnFamilyS" +
+ "tate\022\035\n\031ADD_COLUMN_FAMILY_PREPARE\020\001\022#\n\037A" +
+ "DD_COLUMN_FAMILY_PRE_OPERATION\020\002\022-\n)ADD_" +
+ "COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR\020\003\022" +
+ "$\n ADD_COLUMN_FAMILY_POST_OPERATION\020\004\022(\n" +
+ "$ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS\020\005*",
+ "\353\001\n\027ModifyColumnFamilyState\022 \n\034MODIFY_CO" +
+ "LUMN_FAMILY_PREPARE\020\001\022&\n\"MODIFY_COLUMN_F" +
+ "AMILY_PRE_OPERATION\020\002\0220\n,MODIFY_COLUMN_F" +
+ "AMILY_UPDATE_TABLE_DESCRIPTOR\020\003\022\'\n#MODIF" +
+ "Y_COLUMN_FAMILY_POST_OPERATION\020\004\022+\n\'MODI" +
+ "FY_COLUMN_FAMILY_REOPEN_ALL_REGIONS\020\005*\226\002" +
+ "\n\027DeleteColumnFamilyState\022 \n\034DELETE_COLU" +
+ "MN_FAMILY_PREPARE\020\001\022&\n\"DELETE_COLUMN_FAM" +
+ "ILY_PRE_OPERATION\020\002\0220\n,DELETE_COLUMN_FAM" +
+ "ILY_UPDATE_TABLE_DESCRIPTOR\020\003\022)\n%DELETE_",
+ "COLUMN_FAMILY_DELETE_FS_LAYOUT\020\004\022\'\n#DELE" +
+ "TE_COLUMN_FAMILY_POST_OPERATION\020\005\022+\n\'DEL" +
+ "ETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS\020\006BK" +
+ "\n*org.apache.hadoop.hbase.protobuf.gener" +
+ "atedB\025MasterProcedureProtosH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -3857,6 +7748,24 @@ public final class MasterProcedureProtos {
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_DeleteTableStateData_descriptor,
new java.lang.String[] { "UserInfo", "TableName", "RegionInfo", });
+ internal_static_AddColumnFamilyStateData_descriptor =
+ getDescriptor().getMessageTypes().get(3);
+ internal_static_AddColumnFamilyStateData_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_AddColumnFamilyStateData_descriptor,
+ new java.lang.String[] { "UserInfo", "TableName", "ColumnfamilySchema", "UnmodifiedTableSchema", });
+ internal_static_ModifyColumnFamilyStateData_descriptor =
+ getDescriptor().getMessageTypes().get(4);
+ internal_static_ModifyColumnFamilyStateData_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_ModifyColumnFamilyStateData_descriptor,
+ new java.lang.String[] { "UserInfo", "TableName", "ColumnfamilySchema", "UnmodifiedTableSchema", });
+ internal_static_DeleteColumnFamilyStateData_descriptor =
+ getDescriptor().getMessageTypes().get(5);
+ internal_static_DeleteColumnFamilyStateData_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_DeleteColumnFamilyStateData_descriptor,
+ new java.lang.String[] { "UserInfo", "TableName", "ColumnfamilyName", "UnmodifiedTableSchema", });
return null;
}
};
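
The generated message above follows the stock protobuf 2.x pattern: a Builder with has/set/clear accessors, a build() that rejects missing required fields, and a memoized hashCode(). A minimal round-trip sketch (illustrative only, not part of the patch; userInfo and tableNameProto are assumed to be built elsewhere):

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyStateData;

    // Build a state-data message; build() throws if a required field is unset.
    DeleteColumnFamilyStateData msg = DeleteColumnFamilyStateData.newBuilder()
        .setUserInfo(userInfo)                               // required field 1 (assumed in scope)
        .setTableName(tableNameProto)                        // required field 2 (assumed in scope)
        .setColumnfamilyName(ByteString.copyFromUtf8("cf"))  // required field 3
        .build();                                            // unmodified_table_schema is optional and stays unset

    // Serialize and parse back; equality and the memoized hash survive the trip.
    DeleteColumnFamilyStateData parsed = DeleteColumnFamilyStateData.parseFrom(msg.toByteArray());
    assert msg.equals(parsed) && msg.hashCode() == parsed.hashCode();
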
diff --git a/hbase-protocol/src/main/protobuf/MasterProcedure.proto b/hbase-protocol/src/main/protobuf/MasterProcedure.proto
index 97d1af66637..a07516df450 100644
--- a/hbase-protocol/src/main/protobuf/MasterProcedure.proto
+++ b/hbase-protocol/src/main/protobuf/MasterProcedure.proto
@@ -89,3 +89,49 @@ message DeleteTableStateData {
required TableName table_name = 2;
repeated RegionInfo region_info = 3;
}
+
+enum AddColumnFamilyState {
+ ADD_COLUMN_FAMILY_PREPARE = 1;
+ ADD_COLUMN_FAMILY_PRE_OPERATION = 2;
+ ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3;
+ ADD_COLUMN_FAMILY_POST_OPERATION = 4;
+ ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 5;
+}
+
+message AddColumnFamilyStateData {
+ required UserInformation user_info = 1;
+ required TableName table_name = 2;
+ required ColumnFamilySchema columnfamily_schema = 3;
+ optional TableSchema unmodified_table_schema = 4;
+}
+
+enum ModifyColumnFamilyState {
+ MODIFY_COLUMN_FAMILY_PREPARE = 1;
+ MODIFY_COLUMN_FAMILY_PRE_OPERATION = 2;
+ MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3;
+ MODIFY_COLUMN_FAMILY_POST_OPERATION = 4;
+ MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 5;
+}
+
+message ModifyColumnFamilyStateData {
+ required UserInformation user_info = 1;
+ required TableName table_name = 2;
+ required ColumnFamilySchema columnfamily_schema = 3;
+ optional TableSchema unmodified_table_schema = 4;
+}
+
+enum DeleteColumnFamilyState {
+ DELETE_COLUMN_FAMILY_PREPARE = 1;
+ DELETE_COLUMN_FAMILY_PRE_OPERATION = 2;
+ DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR = 3;
+ DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT = 4;
+ DELETE_COLUMN_FAMILY_POST_OPERATION = 5;
+ DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS = 6;
+}
+
+message DeleteColumnFamilyStateData {
+ required UserInformation user_info = 1;
+ required TableName table_name = 2;
+ required bytes columnfamily_name = 3;
+ optional TableSchema unmodified_table_schema = 4;
+}
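
The state enums above number their values from 1 so that a persisted stateId can be mapped back to an enum value after a master restart; this is exactly what the procedures' getStateId()/getState() overrides rely on. A small sketch of that mapping (illustrative only):

    import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyState;

    // Enum value <-> persisted integer id, as used by getStateId()/getState().
    int stateId = DeleteColumnFamilyState.DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT.getNumber(); // == 4
    DeleteColumnFamilyState state = DeleteColumnFamilyState.valueOf(stateId);
    assert state == DeleteColumnFamilyState.DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT;
    // Unknown ids (e.g. written by a newer version) map to null rather than throwing.
    assert DeleteColumnFamilyState.valueOf(99) == null;
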
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index ba739b23af5..2e330955640 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -90,14 +90,14 @@ import org.apache.hadoop.hbase.master.cleaner.LogCleaner;
import org.apache.hadoop.hbase.master.handler.DisableTableHandler;
import org.apache.hadoop.hbase.master.handler.DispatchMergingRegionHandler;
import org.apache.hadoop.hbase.master.handler.EnableTableHandler;
-import org.apache.hadoop.hbase.master.handler.TableAddFamilyHandler;
-import org.apache.hadoop.hbase.master.handler.TableDeleteFamilyHandler;
-import org.apache.hadoop.hbase.master.handler.TableModifyFamilyHandler;
import org.apache.hadoop.hbase.master.handler.TruncateTableHandler;
+import org.apache.hadoop.hbase.master.procedure.AddColumnFamilyProcedure;
import org.apache.hadoop.hbase.master.procedure.CreateTableProcedure;
+import org.apache.hadoop.hbase.master.procedure.DeleteColumnFamilyProcedure;
import org.apache.hadoop.hbase.master.procedure.DeleteTableProcedure;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureConstants;
import org.apache.hadoop.hbase.master.procedure.MasterProcedureEnv;
+import org.apache.hadoop.hbase.master.procedure.ModifyColumnFamilyProcedure;
import org.apache.hadoop.hbase.master.procedure.ModifyTableProcedure;
import org.apache.hadoop.hbase.master.procedure.ProcedurePrepareLatch;
import org.apache.hadoop.hbase.master.procedure.ProcedureSyncWait;
@@ -1618,8 +1618,11 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
return;
}
}
- //TODO: we should process this (and some others) in an executor
- new TableAddFamilyHandler(tableName, columnDescriptor, this, this).prepare().process();
+ // Execute the operation synchronously - wait for the operation to complete before continuing.
+ long procId =
+ this.procedureExecutor.submitProcedure(new AddColumnFamilyProcedure(procedureExecutor
+ .getEnvironment(), tableName, columnDescriptor));
+ ProcedureSyncWait.waitForProcedureToComplete(procedureExecutor, procId);
if (cpHost != null) {
cpHost.postAddColumn(tableName, columnDescriptor);
}
@@ -1637,8 +1640,13 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
}
}
LOG.info(getClientIdAuditPrefix() + " modify " + descriptor);
- new TableModifyFamilyHandler(tableName, descriptor, this, this)
- .prepare().process();
+
+ // Execute the operation synchronously - wait for the operation to complete before continuing.
+ long procId =
+ this.procedureExecutor.submitProcedure(new ModifyColumnFamilyProcedure(procedureExecutor
+ .getEnvironment(), tableName, descriptor));
+ ProcedureSyncWait.waitForProcedureToComplete(procedureExecutor, procId);
+
if (cpHost != null) {
cpHost.postModifyColumn(tableName, descriptor);
}
@@ -1654,7 +1662,13 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
}
}
LOG.info(getClientIdAuditPrefix() + " delete " + Bytes.toString(columnName));
- new TableDeleteFamilyHandler(tableName, columnName, this, this).prepare().process();
+
+ // Execute the operation synchronously - wait for the operation to complete before continuing.
+ long procId =
+ this.procedureExecutor.submitProcedure(new DeleteColumnFamilyProcedure(procedureExecutor
+ .getEnvironment(), tableName, columnName));
+ ProcedureSyncWait.waitForProcedureToComplete(procedureExecutor, procId);
+
if (cpHost != null) {
cpHost.postDeleteColumn(tableName, columnName);
}
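
All three HMaster call sites above share the same submit-then-wait idiom. A hypothetical helper (not in the patch) that captures it, assuming the procedureExecutor field used above:

    // Hypothetical refactoring sketch; procedureExecutor is the HMaster field used above.
    private long submitAndWait(final Procedure proc) throws IOException {
      final long procId = this.procedureExecutor.submitProcedure(proc);
      // Blocks until the procedure completes, surfacing its failure as an IOException.
      ProcedureSyncWait.waitForProcedureToComplete(this.procedureExecutor, procId);
      return procId;
    }
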
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableDeleteFamilyHandler.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableDeleteFamilyHandler.java
index 7b5c5c53b54..3bbef0afc2e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableDeleteFamilyHandler.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/handler/TableDeleteFamilyHandler.java
@@ -21,11 +21,11 @@ package org.apache.hadoop.hbase.master.handler;
import java.io.IOException;
import java.util.List;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.Server;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.executor.EventType;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
@@ -34,7 +34,7 @@ import org.apache.hadoop.hbase.master.MasterServices;
import org.apache.hadoop.hbase.util.Bytes;
/**
- * Handles adding a new family to an existing table.
+ * Handles deleting a column family from an existing table.
*/
@InterfaceAudience.Private
public class TableDeleteFamilyHandler extends TableEventHandler {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
new file mode 100644
index 00000000000..98a00c20548
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/AddColumnFamilyProcedure.java
@@ -0,0 +1,409 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.procedure;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.InvalidFamilyOperationException;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.TableState;
+import org.apache.hadoop.hbase.executor.EventType;
+import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyState;
+import org.apache.hadoop.security.UserGroupInformation;
+
+/**
+ * The procedure to add a column family to an existing table.
+ */
+@InterfaceAudience.Private
+public class AddColumnFamilyProcedure
+ extends StateMachineProcedure<MasterProcedureEnv, AddColumnFamilyState>
+ implements TableProcedureInterface {
+ private static final Log LOG = LogFactory.getLog(AddColumnFamilyProcedure.class);
+
+ private final AtomicBoolean aborted = new AtomicBoolean(false);
+
+ private TableName tableName;
+ private HTableDescriptor unmodifiedHTableDescriptor;
+ private HColumnDescriptor cfDescriptor;
+ private UserGroupInformation user;
+
+ private List<HRegionInfo> regionInfoList;
+ private Boolean traceEnabled;
+
+ public AddColumnFamilyProcedure() {
+ this.unmodifiedHTableDescriptor = null;
+ this.regionInfoList = null;
+ this.traceEnabled = null;
+ }
+
+ public AddColumnFamilyProcedure(
+ final MasterProcedureEnv env,
+ final TableName tableName,
+ final HColumnDescriptor cfDescriptor) throws IOException {
+ this.tableName = tableName;
+ this.cfDescriptor = cfDescriptor;
+ this.user = env.getRequestUser().getUGI();
+ this.unmodifiedHTableDescriptor = null;
+ this.regionInfoList = null;
+ this.traceEnabled = null;
+ }
+
+ @Override
+ protected Flow executeFromState(final MasterProcedureEnv env, final AddColumnFamilyState state) {
+ if (isTraceEnabled()) {
+ LOG.trace(this + " execute state=" + state);
+ }
+
+ try {
+ switch (state) {
+ case ADD_COLUMN_FAMILY_PREPARE:
+ prepareAdd(env);
+ setNextState(AddColumnFamilyState.ADD_COLUMN_FAMILY_PRE_OPERATION);
+ break;
+ case ADD_COLUMN_FAMILY_PRE_OPERATION:
+ preAdd(env, state);
+ setNextState(AddColumnFamilyState.ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR);
+ break;
+ case ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR:
+ updateTableDescriptor(env);
+ setNextState(AddColumnFamilyState.ADD_COLUMN_FAMILY_POST_OPERATION);
+ break;
+ case ADD_COLUMN_FAMILY_POST_OPERATION:
+ postAdd(env, state);
+ setNextState(AddColumnFamilyState.ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS);
+ break;
+ case ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS:
+ reOpenAllRegionsIfTableIsOnline(env);
+ return Flow.NO_MORE_STATE;
+ default:
+ throw new UnsupportedOperationException(this + " unhandled state=" + state);
+ }
+ } catch (InterruptedException|IOException e) {
+ LOG.warn("Error trying to add the column family" + getColumnFamilyName() + " to the table "
+ + tableName + " (in state=" + state + ")", e);
+
+ setFailure("master-add-columnfamily", e);
+ }
+ return Flow.HAS_MORE_STATE;
+ }
+
+ @Override
+ protected void rollbackState(final MasterProcedureEnv env, final AddColumnFamilyState state)
+ throws IOException {
+ if (isTraceEnabled()) {
+ LOG.trace(this + " rollback state=" + state);
+ }
+ try {
+ switch (state) {
+ case ADD_COLUMN_FAMILY_REOPEN_ALL_REGIONS:
+ break; // Nothing to undo.
+ case ADD_COLUMN_FAMILY_POST_OPERATION:
+ // TODO-MAYBE: call the coprocessor event to undo?
+ break;
+ case ADD_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR:
+ restoreTableDescriptor(env);
+ break;
+ case ADD_COLUMN_FAMILY_PRE_OPERATION:
+ // TODO-MAYBE: call the coprocessor event to undo?
+ break;
+ case ADD_COLUMN_FAMILY_PREPARE:
+ break; // nothing to do
+ default:
+ throw new UnsupportedOperationException(this + " unhandled state=" + state);
+ }
+ } catch (IOException e) {
+ // This will be retried. Unless there is a bug in the code,
+ // this should be just a "temporary error" (e.g. network down)
+ LOG.warn("Failed rollback attempt step " + state + " for adding the column family"
+ + getColumnFamilyName() + " to the table " + tableName, e);
+ throw e;
+ }
+ }
+
+ @Override
+ protected AddColumnFamilyState getState(final int stateId) {
+ return AddColumnFamilyState.valueOf(stateId);
+ }
+
+ @Override
+ protected int getStateId(final AddColumnFamilyState state) {
+ return state.getNumber();
+ }
+
+ @Override
+ protected AddColumnFamilyState getInitialState() {
+ return AddColumnFamilyState.ADD_COLUMN_FAMILY_PREPARE;
+ }
+
+ @Override
+ protected void setNextState(AddColumnFamilyState state) {
+ if (aborted.get()) {
+ setAbortFailure("add-columnfamily", "abort requested");
+ } else {
+ super.setNextState(state);
+ }
+ }
+
+ @Override
+ public boolean abort(final MasterProcedureEnv env) {
+ aborted.set(true);
+ return true;
+ }
+
+ @Override
+ protected boolean acquireLock(final MasterProcedureEnv env) {
+ if (!env.isInitialized()) return false;
+ return env.getProcedureQueue().tryAcquireTableWrite(
+ tableName,
+ EventType.C_M_ADD_FAMILY.toString());
+ }
+
+ @Override
+ protected void releaseLock(final MasterProcedureEnv env) {
+ env.getProcedureQueue().releaseTableWrite(tableName);
+ }
+
+ @Override
+ public void serializeStateData(final OutputStream stream) throws IOException {
+ super.serializeStateData(stream);
+
+ MasterProcedureProtos.AddColumnFamilyStateData.Builder addCFMsg =
+ MasterProcedureProtos.AddColumnFamilyStateData.newBuilder()
+ .setUserInfo(MasterProcedureUtil.toProtoUserInfo(user))
+ .setTableName(ProtobufUtil.toProtoTableName(tableName))
+ .setColumnfamilySchema(cfDescriptor.convert());
+ if (unmodifiedHTableDescriptor != null) {
+ addCFMsg.setUnmodifiedTableSchema(unmodifiedHTableDescriptor.convert());
+ }
+
+ addCFMsg.build().writeDelimitedTo(stream);
+ }
+
+ @Override
+ public void deserializeStateData(final InputStream stream) throws IOException {
+ super.deserializeStateData(stream);
+
+ MasterProcedureProtos.AddColumnFamilyStateData addCFMsg =
+ MasterProcedureProtos.AddColumnFamilyStateData.parseDelimitedFrom(stream);
+ user = MasterProcedureUtil.toUserInfo(addCFMsg.getUserInfo());
+ tableName = ProtobufUtil.toTableName(addCFMsg.getTableName());
+ cfDescriptor = HColumnDescriptor.convert(addCFMsg.getColumnfamilySchema());
+ if (addCFMsg.hasUnmodifiedTableSchema()) {
+ unmodifiedHTableDescriptor = HTableDescriptor.convert(addCFMsg.getUnmodifiedTableSchema());
+ }
+ }
+
+ @Override
+ public void toStringClassDetails(StringBuilder sb) {
+ sb.append(getClass().getSimpleName());
+ sb.append(" (table=");
+ sb.append(tableName);
+ sb.append(", columnfamily=");
+ if (cfDescriptor != null) {
+ sb.append(getColumnFamilyName());
+ } else {
+ sb.append("Unknown");
+ }
+ sb.append(") user=");
+ sb.append(user);
+ }
+
+ @Override
+ public TableName getTableName() {
+ return tableName;
+ }
+
+ @Override
+ public TableOperationType getTableOperationType() {
+ return TableOperationType.EDIT;
+ }
+
+ /**
+ * Action before any real action of adding the column family.
+ * @param env MasterProcedureEnv
+ * @throws IOException
+ */
+ private void prepareAdd(final MasterProcedureEnv env) throws IOException {
+ // Checks whether the table is allowed to be modified.
+ MasterDDLOperationHelper.checkTableModifiable(env, tableName);
+
+ // In order to update the descriptor, we need to retrieve the old descriptor for comparison.
+ unmodifiedHTableDescriptor = env.getMasterServices().getTableDescriptors().get(tableName);
+ if (unmodifiedHTableDescriptor == null) {
+ throw new IOException("HTableDescriptor missing for " + tableName);
+ }
+ if (unmodifiedHTableDescriptor.hasFamily(cfDescriptor.getName())) {
+ throw new InvalidFamilyOperationException("Column family '" + getColumnFamilyName()
+ + "' in table '" + tableName + "' already exists so cannot be added");
+ }
+ }
+
+ /**
+ * Action before adding the column family.
+ * @param env MasterProcedureEnv
+ * @param state the procedure state
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ private void preAdd(final MasterProcedureEnv env, final AddColumnFamilyState state)
+ throws IOException, InterruptedException {
+ runCoprocessorAction(env, state);
+ }
+
+ /**
+ * Add the column family to the table descriptor and persist it.
+ */
+ private void updateTableDescriptor(final MasterProcedureEnv env) throws IOException {
+ // Update table descriptor
+ LOG.info("AddColumn. Table = " + tableName + " HCD = " + cfDescriptor.toString());
+
+ HTableDescriptor htd = env.getMasterServices().getTableDescriptors().get(tableName);
+
+ if (htd.hasFamily(cfDescriptor.getName())) {
+ // It is possible to reach this state if the column family was already added to the
+ // table descriptor but a master failover happened before this state completed.
+ // Running this step again must therefore be a harmless no-op.
+ return;
+ }
+
+ htd.addFamily(cfDescriptor);
+ env.getMasterServices().getTableDescriptors().add(htd);
+ }
+
+ /**
+ * Restore the table descriptor to its pre-add state.
+ * @param env MasterProcedureEnv
+ * @throws IOException
+ */
+ private void restoreTableDescriptor(final MasterProcedureEnv env) throws IOException {
+ HTableDescriptor htd = env.getMasterServices().getTableDescriptors().get(tableName);
+ if (htd.hasFamily(cfDescriptor.getName())) {
+ // Remove the column family from the file system and restore the table descriptor
+ // to its before-add state.
+ MasterDDLOperationHelper.deleteColumnFamilyFromFileSystem(env, tableName,
+ getRegionInfoList(env), cfDescriptor.getName());
+
+ env.getMasterServices().getTableDescriptors().add(unmodifiedHTableDescriptor);
+
+ // Make sure regions are opened after table descriptor is updated.
+ reOpenAllRegionsIfTableIsOnline(env);
+ }
+ }
+
+ /**
+ * Action after adding column family.
+ * @param env MasterProcedureEnv
+ * @param state the procedure state
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ private void postAdd(final MasterProcedureEnv env, final AddColumnFamilyState state)
+ throws IOException, InterruptedException {
+ runCoprocessorAction(env, state);
+ }
+
+ /**
+ * Last action from the procedure - executed when online schema change is supported.
+ * @param env MasterProcedureEnv
+ * @throws IOException
+ */
+ private void reOpenAllRegionsIfTableIsOnline(final MasterProcedureEnv env) throws IOException {
+ // This operation only runs when the table is enabled.
+ if (!env.getMasterServices().getAssignmentManager().getTableStateManager()
+ .isTableState(getTableName(), TableState.State.ENABLED)) {
+ return;
+ }
+
+ if (MasterDDLOperationHelper.reOpenAllRegions(env, getTableName(), getRegionInfoList(env))) {
+ LOG.info("Completed add column family operation on table " + getTableName());
+ } else {
+ LOG.warn("Error on reopening the regions on table " + getTableName());
+ }
+ }
+
+ /**
+ * The procedure could be restarted on a different machine, in which case traceEnabled is
+ * null and needs to be retrieved again.
+ * @return traceEnabled
+ */
+ private Boolean isTraceEnabled() {
+ if (traceEnabled == null) {
+ traceEnabled = LOG.isTraceEnabled();
+ }
+ return traceEnabled;
+ }
+
+ private String getColumnFamilyName() {
+ return cfDescriptor.getNameAsString();
+ }
+
+ /**
+ * Coprocessor Action.
+ * @param env MasterProcedureEnv
+ * @param state the procedure state
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ private void runCoprocessorAction(final MasterProcedureEnv env, final AddColumnFamilyState state)
+ throws IOException, InterruptedException {
+ final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
+ if (cpHost != null) {
+ user.doAs(new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ switch (state) {
+ case ADD_COLUMN_FAMILY_PRE_OPERATION:
+ cpHost.preAddColumnHandler(tableName, cfDescriptor);
+ break;
+ case ADD_COLUMN_FAMILY_POST_OPERATION:
+ cpHost.postAddColumnHandler(tableName, cfDescriptor);
+ break;
+ default:
+ throw new UnsupportedOperationException(this + " unhandled state=" + state);
+ }
+ return null;
+ }
+ });
+ }
+ }
+
+ private List<HRegionInfo> getRegionInfoList(final MasterProcedureEnv env) throws IOException {
+ if (regionInfoList == null) {
+ regionInfoList = ProcedureSyncWait.getRegionsFromMeta(env, getTableName());
+ }
+ return regionInfoList;
+ }
+}
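
The serializeStateData/deserializeStateData overrides above use protobuf's length-delimited framing so the procedure state survives a master restart. A sketch of that round trip against an in-memory stream (illustrative only; the proto field values are assumed to be built elsewhere):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.AddColumnFamilyStateData;

    ByteArrayOutputStream store = new ByteArrayOutputStream(); // stands in for the procedure store
    AddColumnFamilyStateData.newBuilder()
        .setUserInfo(userInfo)             // assumed in scope, as in serializeStateData()
        .setTableName(tableNameProto)      // assumed in scope
        .setColumnfamilySchema(cfSchema)   // assumed in scope
        .build()
        .writeDelimitedTo(store);

    // On recovery, deserializeStateData() reads the same delimited frame back.
    AddColumnFamilyStateData recovered = AddColumnFamilyStateData.parseDelimitedFrom(
        new ByteArrayInputStream(store.toByteArray()));
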
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
new file mode 100644
index 00000000000..a053c8944a0
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/DeleteColumnFamilyProcedure.java
@@ -0,0 +1,441 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.procedure;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.InvalidFamilyOperationException;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.TableState;
+import org.apache.hadoop.hbase.executor.EventType;
+import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.DeleteColumnFamilyState;
+import org.apache.hadoop.hbase.util.ByteStringer;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.security.UserGroupInformation;
+
+/**
+ * The procedure to delete a column family from an existing table.
+ */
+@InterfaceAudience.Private
+public class DeleteColumnFamilyProcedure
+ extends StateMachineProcedure<MasterProcedureEnv, DeleteColumnFamilyState>
+ implements TableProcedureInterface {
+ private static final Log LOG = LogFactory.getLog(DeleteColumnFamilyProcedure.class);
+
+ private final AtomicBoolean aborted = new AtomicBoolean(false);
+
+ private HTableDescriptor unmodifiedHTableDescriptor;
+ private TableName tableName;
+ private byte [] familyName;
+ private UserGroupInformation user;
+
+ private List<HRegionInfo> regionInfoList;
+ private Boolean traceEnabled;
+
+ public DeleteColumnFamilyProcedure() {
+ this.unmodifiedHTableDescriptor = null;
+ this.regionInfoList = null;
+ this.traceEnabled = null;
+ }
+
+ public DeleteColumnFamilyProcedure(
+ final MasterProcedureEnv env,
+ final TableName tableName,
+ final byte[] familyName) throws IOException {
+ this.tableName = tableName;
+ this.familyName = familyName;
+ this.user = env.getRequestUser().getUGI();
+ this.unmodifiedHTableDescriptor = null;
+ this.regionInfoList = null;
+ this.traceEnabled = null;
+ }
+
+ @Override
+ protected Flow executeFromState(final MasterProcedureEnv env, DeleteColumnFamilyState state) {
+ if (isTraceEnabled()) {
+ LOG.trace(this + " execute state=" + state);
+ }
+
+ try {
+ switch (state) {
+ case DELETE_COLUMN_FAMILY_PREPARE:
+ prepareDelete(env);
+ setNextState(DeleteColumnFamilyState.DELETE_COLUMN_FAMILY_PRE_OPERATION);
+ break;
+ case DELETE_COLUMN_FAMILY_PRE_OPERATION:
+ preDelete(env, state);
+ setNextState(DeleteColumnFamilyState.DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR);
+ break;
+ case DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR:
+ updateTableDescriptor(env);
+ setNextState(DeleteColumnFamilyState.DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT);
+ break;
+ case DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT:
+ deleteFromFs(env);
+ setNextState(DeleteColumnFamilyState.DELETE_COLUMN_FAMILY_POST_OPERATION);
+ break;
+ case DELETE_COLUMN_FAMILY_POST_OPERATION:
+ postDelete(env, state);
+ setNextState(DeleteColumnFamilyState.DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS);
+ break;
+ case DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS:
+ reOpenAllRegionsIfTableIsOnline(env);
+ return Flow.NO_MORE_STATE;
+ default:
+ throw new UnsupportedOperationException(this + " unhandled state=" + state);
+ }
+ } catch (InterruptedException|IOException e) {
+ if (!isRollbackSupported(state)) {
+ // We have reached a state that cannot be rolled back; just keep retrying.
+ LOG.warn("Error trying to delete the column family " + getColumnFamilyName()
+ + " from table " + tableName + " (in state=" + state + ")", e);
+ } else {
+ LOG.error("Error trying to delete the column family " + getColumnFamilyName()
+ + " from table " + tableName + " (in state=" + state + ")", e);
+ setFailure("master-delete-column-family", e);
+ }
+ }
+ return Flow.HAS_MORE_STATE;
+ }
+
+ @Override
+ protected void rollbackState(final MasterProcedureEnv env, final DeleteColumnFamilyState state)
+ throws IOException {
+ if (isTraceEnabled()) {
+ LOG.trace(this + " rollback state=" + state);
+ }
+ try {
+ switch (state) {
+ case DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS:
+ break; // Nothing to undo.
+ case DELETE_COLUMN_FAMILY_POST_OPERATION:
+ // TODO-MAYBE: call the coprocessor event to undo?
+ break;
+ case DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT:
+ // Once we reach this state we can NOT roll back, as it is tricky to undelete
+ // the already-deleted files. We are not supposed to reach here; throw an
+ // exception so that we know there is a code bug to investigate.
+ throw new UnsupportedOperationException(this + " rollback of state=" + state
+ + " is unsupported.");
+ case DELETE_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR:
+ restoreTableDescriptor(env);
+ break;
+ case DELETE_COLUMN_FAMILY_PRE_OPERATION:
+ // TODO-MAYBE: call the coprocessor event to undo?
+ break;
+ case DELETE_COLUMN_FAMILY_PREPARE:
+ break; // nothing to do
+ default:
+ throw new UnsupportedOperationException(this + " unhandled state=" + state);
+ }
+ } catch (IOException e) {
+ // This will be retried. Unless there is a bug in the code,
+ // this should be just a "temporary error" (e.g. network down)
+ LOG.warn("Failed rollback attempt step " + state + " for deleting the column family"
+ + getColumnFamilyName() + " to the table " + tableName, e);
+ throw e;
+ }
+ }
+
+ @Override
+ protected DeleteColumnFamilyState getState(final int stateId) {
+ return DeleteColumnFamilyState.valueOf(stateId);
+ }
+
+ @Override
+ protected int getStateId(final DeleteColumnFamilyState state) {
+ return state.getNumber();
+ }
+
+ @Override
+ protected DeleteColumnFamilyState getInitialState() {
+ return DeleteColumnFamilyState.DELETE_COLUMN_FAMILY_PREPARE;
+ }
+
+ @Override
+ protected void setNextState(DeleteColumnFamilyState state) {
+ if (aborted.get() && isRollbackSupported(state)) {
+ setAbortFailure("delete-columnfamily", "abort requested");
+ } else {
+ super.setNextState(state);
+ }
+ }
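+
+ // Note on the override above: abort requests are honored only at state
+ // transitions, and only while the state being entered can still be rolled back.
+ // Once the procedure moves into DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT and
+ // beyond, an abort request is ignored and the procedure retries forward.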
+
+ @Override
+ public boolean abort(final MasterProcedureEnv env) {
+ aborted.set(true);
+ return true;
+ }
+
+ @Override
+ protected boolean acquireLock(final MasterProcedureEnv env) {
+ if (!env.isInitialized()) return false;
+ return env.getProcedureQueue().tryAcquireTableWrite(
+ tableName,
+ EventType.C_M_DELETE_FAMILY.toString());
+ }
+
+ @Override
+ protected void releaseLock(final MasterProcedureEnv env) {
+ env.getProcedureQueue().releaseTableWrite(tableName);
+ }
+
+ @Override
+ public void serializeStateData(final OutputStream stream) throws IOException {
+ super.serializeStateData(stream);
+
+ MasterProcedureProtos.DeleteColumnFamilyStateData.Builder deleteCFMsg =
+ MasterProcedureProtos.DeleteColumnFamilyStateData.newBuilder()
+ .setUserInfo(MasterProcedureUtil.toProtoUserInfo(user))
+ .setTableName(ProtobufUtil.toProtoTableName(tableName))
+ .setColumnfamilyName(ByteStringer.wrap(familyName));
+ if (unmodifiedHTableDescriptor != null) {
+ deleteCFMsg.setUnmodifiedTableSchema(unmodifiedHTableDescriptor.convert());
+ }
+
+ deleteCFMsg.build().writeDelimitedTo(stream);
+ }
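+
+ // Illustrative round-trip sketch (an assumption, not part of this patch): the
+ // delimited protobuf write above pairs with parseDelimitedFrom() below, so a
+ // test could verify persistence roughly like this:
+ //
+ //   ByteArrayOutputStream out = new ByteArrayOutputStream();
+ //   proc.serializeStateData(out);
+ //   DeleteColumnFamilyProcedure restored = new DeleteColumnFamilyProcedure();
+ //   restored.deserializeStateData(new ByteArrayInputStream(out.toByteArray()));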
+
+ @Override
+ public void deserializeStateData(final InputStream stream) throws IOException {
+ super.deserializeStateData(stream);
+ MasterProcedureProtos.DeleteColumnFamilyStateData deleteCFMsg =
+ MasterProcedureProtos.DeleteColumnFamilyStateData.parseDelimitedFrom(stream);
+ user = MasterProcedureUtil.toUserInfo(deleteCFMsg.getUserInfo());
+ tableName = ProtobufUtil.toTableName(deleteCFMsg.getTableName());
+ familyName = deleteCFMsg.getColumnfamilyName().toByteArray();
+
+ if (deleteCFMsg.hasUnmodifiedTableSchema()) {
+ unmodifiedHTableDescriptor = HTableDescriptor.convert(deleteCFMsg.getUnmodifiedTableSchema());
+ }
+ }
+
+ @Override
+ public void toStringClassDetails(StringBuilder sb) {
+ sb.append(getClass().getSimpleName());
+ sb.append(" (table=");
+ sb.append(tableName);
+ sb.append(", columnfamily=");
+ if (familyName != null) {
+ sb.append(getColumnFamilyName());
+ } else {
+ sb.append("Unknown");
+ }
+ sb.append(") user=");
+ sb.append(user);
+ }
+
+ @Override
+ public TableName getTableName() {
+ return tableName;
+ }
+
+ @Override
+ public TableOperationType getTableOperationType() {
+ return TableOperationType.EDIT;
+ }
+
+ /**
+ * Action taken before any real work of deleting the column family begins.
+ * @param env MasterProcedureEnv
+ * @throws IOException
+ */
+ private void prepareDelete(final MasterProcedureEnv env) throws IOException {
+ // Checks whether the table is allowed to be modified.
+ MasterDDLOperationHelper.checkTableModifiable(env, tableName);
+
+ // In order to update the descriptor, we need to retrieve the old descriptor for comparison.
+ unmodifiedHTableDescriptor = env.getMasterServices().getTableDescriptors().get(tableName);
+ if (unmodifiedHTableDescriptor == null) {
+ throw new IOException("HTableDescriptor missing for " + tableName);
+ }
+ if (!unmodifiedHTableDescriptor.hasFamily(familyName)) {
+ throw new InvalidFamilyOperationException("Family '" + getColumnFamilyName()
+ + "' does not exist, so it cannot be deleted");
+ }
+ }
+
+ /**
+ * Action before deleting column family.
+ * @param env MasterProcedureEnv
+ * @param state the procedure state
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ private void preDelete(final MasterProcedureEnv env, final DeleteColumnFamilyState state)
+ throws IOException, InterruptedException {
+ runCoprocessorAction(env, state);
+ }
+
+ /**
+ * Remove the column family from the table descriptor.
+ */
+ private void updateTableDescriptor(final MasterProcedureEnv env) throws IOException {
+ // Update table descriptor
+ LOG.info("DeleteColumn. Table = " + tableName + " family = " + getColumnFamilyName());
+
+ HTableDescriptor htd = env.getMasterServices().getTableDescriptors().get(tableName);
+
+ if (!htd.hasFamily(familyName)) {
+ // It is possible to reach this point if the column family was already removed
+ // from the table descriptor but a master failover happened before this state
+ // completed. Running this method multiple times must therefore be safe.
+ return;
+ }
+
+ htd.removeFamily(familyName);
+ env.getMasterServices().getTableDescriptors().add(htd);
+ }
+
+ /**
+ * Restore the old table descriptor.
+ * @param env MasterProcedureEnv
+ * @throws IOException
+ **/
+ private void restoreTableDescriptor(final MasterProcedureEnv env) throws IOException {
+ env.getMasterServices().getTableDescriptors().add(unmodifiedHTableDescriptor);
+
+ // Make sure regions are opened after table descriptor is updated.
+ reOpenAllRegionsIfTableIsOnline(env);
+ }
+
+ /**
+ * Remove the column family from the file system
+ **/
+ private void deleteFromFs(final MasterProcedureEnv env) throws IOException {
+ MasterDDLOperationHelper.deleteColumnFamilyFromFileSystem(env, tableName,
+ getRegionInfoList(env), familyName);
+ }
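+
+ // This is the point of no return: once the family directories are removed from
+ // the filesystem there is no rollback handler for this state (see rollbackState
+ // above), so the procedure can only retry forward from here.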
+
+ /**
+ * Action after deleting column family.
+ * @param env MasterProcedureEnv
+ * @param state the procedure state
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ private void postDelete(final MasterProcedureEnv env, final DeleteColumnFamilyState state)
+ throws IOException, InterruptedException {
+ runCoprocessorAction(env, state);
+ }
+
+ /**
+ * Last action of the procedure, executed when online schema change is supported.
+ * @param env MasterProcedureEnv
+ * @throws IOException
+ */
+ private void reOpenAllRegionsIfTableIsOnline(final MasterProcedureEnv env) throws IOException {
+ // This operation only runs when the table is enabled.
+ if (!env.getMasterServices().getAssignmentManager().getTableStateManager()
+ .isTableState(getTableName(), TableState.State.ENABLED)) {
+ return;
+ }
+
+ if (MasterDDLOperationHelper.reOpenAllRegions(env, getTableName(), getRegionInfoList(env))) {
+ LOG.info("Completed delete column family operation on table " + getTableName());
+ } else {
+ LOG.warn("Error on reopening the regions on table " + getTableName());
+ }
+ }
+
+ /**
+ * The procedure could be restarted from a different machine; in that case the
+ * cached flag is null and has to be looked up again.
+ * @return traceEnabled
+ */
+ private Boolean isTraceEnabled() {
+ if (traceEnabled == null) {
+ traceEnabled = LOG.isTraceEnabled();
+ }
+ return traceEnabled;
+ }
+
+ private String getColumnFamilyName() {
+ return Bytes.toString(familyName);
+ }
+
+ /**
+ * Coprocessor Action.
+ * @param env MasterProcedureEnv
+ * @param state the procedure state
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ private void runCoprocessorAction(final MasterProcedureEnv env,
+ final DeleteColumnFamilyState state) throws IOException, InterruptedException {
+ final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
+ if (cpHost != null) {
+ user.doAs(new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ switch (state) {
+ case DELETE_COLUMN_FAMILY_PRE_OPERATION:
+ cpHost.preDeleteColumnHandler(tableName, familyName);
+ break;
+ case DELETE_COLUMN_FAMILY_POST_OPERATION:
+ cpHost.postDeleteColumnHandler(tableName, familyName);
+ break;
+ default:
+ throw new UnsupportedOperationException(this + " unhandled state=" + state);
+ }
+ return null;
+ }
+ });
+ }
+ }
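+
+ // The doAs() wrapper above replays the coprocessor hooks as the user who
+ // originally requested the DDL operation (persisted via serializeStateData), so
+ // security checks in coprocessors observe the correct identity even if the
+ // procedure resumes on a different master after a failover.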
+
+ /*
+ * Check whether the procedure can still be rolled back from the given state.
+ */
+ private boolean isRollbackSupported(final DeleteColumnFamilyState state) {
+ switch (state) {
+ case DELETE_COLUMN_FAMILY_REOPEN_ALL_REGIONS:
+ case DELETE_COLUMN_FAMILY_POST_OPERATION:
+ case DELETE_COLUMN_FAMILY_DELETE_FS_LAYOUT:
+ // It is not safe to roll back once we have reached these states.
+ return false;
+ default:
+ break;
+ }
+ return true;
+ }
+
+ private List<HRegionInfo> getRegionInfoList(final MasterProcedureEnv env) throws IOException {
+ if (regionInfoList == null) {
+ regionInfoList = ProcedureSyncWait.getRegionsFromMeta(env, getTableName());
+ }
+ return regionInfoList;
+ }
+}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
new file mode 100644
index 00000000000..138ebd84958
--- /dev/null
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/procedure/ModifyColumnFamilyProcedure.java
@@ -0,0 +1,384 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.master.procedure;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.InvalidFamilyOperationException;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.TableState;
+import org.apache.hadoop.hbase.executor.EventType;
+import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
+import org.apache.hadoop.hbase.procedure2.StateMachineProcedure;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos;
+import org.apache.hadoop.hbase.protobuf.generated.MasterProcedureProtos.ModifyColumnFamilyState;
+import org.apache.hadoop.security.UserGroupInformation;
+
+/**
+ * The procedure to modify a column family of an existing table.
+ */
+@InterfaceAudience.Private
+public class ModifyColumnFamilyProcedure
+ extends StateMachineProcedure<MasterProcedureEnv, ModifyColumnFamilyState>
+ implements TableProcedureInterface {
+ private static final Log LOG = LogFactory.getLog(ModifyColumnFamilyProcedure.class);
+
+ private final AtomicBoolean aborted = new AtomicBoolean(false);
+
+ private TableName tableName;
+ private HTableDescriptor unmodifiedHTableDescriptor;
+ private HColumnDescriptor cfDescriptor;
+ private UserGroupInformation user;
+
+ private Boolean traceEnabled;
+
+ public ModifyColumnFamilyProcedure() {
+ this.unmodifiedHTableDescriptor = null;
+ this.traceEnabled = null;
+ }
+
+ public ModifyColumnFamilyProcedure(
+ final MasterProcedureEnv env,
+ final TableName tableName,
+ final HColumnDescriptor cfDescriptor) throws IOException {
+ this.tableName = tableName;
+ this.cfDescriptor = cfDescriptor;
+ this.user = env.getRequestUser().getUGI();
+ this.unmodifiedHTableDescriptor = null;
+ this.traceEnabled = null;
+ }
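+
+ // Illustrative sketch only, not part of this patch: a caller would build the
+ // modified family descriptor first, for example
+ //
+ //   HColumnDescriptor hcd = new HColumnDescriptor("cf");
+ //   hcd.setMaxVersions(5);
+ //   procExec.submitProcedure(new ModifyColumnFamilyProcedure(env, tableName, hcd));
+ //
+ // The procExec variable is an assumption; the descriptor setters are standard
+ // HColumnDescriptor API.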
+
+ @Override
+ protected Flow executeFromState(final MasterProcedureEnv env,
+ final ModifyColumnFamilyState state) {
+ if (isTraceEnabled()) {
+ LOG.trace(this + " execute state=" + state);
+ }
+
+ try {
+ switch (state) {
+ case MODIFY_COLUMN_FAMILY_PREPARE:
+ prepareModify(env);
+ setNextState(ModifyColumnFamilyState.MODIFY_COLUMN_FAMILY_PRE_OPERATION);
+ break;
+ case MODIFY_COLUMN_FAMILY_PRE_OPERATION:
+ preModify(env, state);
+ setNextState(ModifyColumnFamilyState.MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR);
+ break;
+ case MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR:
+ updateTableDescriptor(env);
+ setNextState(ModifyColumnFamilyState.MODIFY_COLUMN_FAMILY_POST_OPERATION);
+ break;
+ case MODIFY_COLUMN_FAMILY_POST_OPERATION:
+ postModify(env, state);
+ setNextState(ModifyColumnFamilyState.MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS);
+ break;
+ case MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS:
+ reOpenAllRegionsIfTableIsOnline(env);
+ return Flow.NO_MORE_STATE;
+ default:
+ throw new UnsupportedOperationException(this + " unhandled state=" + state);
+ }
+ } catch (InterruptedException|IOException e) {
+ LOG.warn("Error trying to modify the column family " + getColumnFamilyName()
+ + " of the table " + tableName + "(in state=" + state + ")", e);
+
+ setFailure("master-modify-columnfamily", e);
+ }
+ return Flow.HAS_MORE_STATE;
+ }
+
+ @Override
+ protected void rollbackState(final MasterProcedureEnv env, final ModifyColumnFamilyState state)
+ throws IOException {
+ if (isTraceEnabled()) {
+ LOG.trace(this + " rollback state=" + state);
+ }
+ try {
+ switch (state) {
+ case MODIFY_COLUMN_FAMILY_REOPEN_ALL_REGIONS:
+ break; // Nothing to undo.
+ case MODIFY_COLUMN_FAMILY_POST_OPERATION:
+ // TODO-MAYBE: call the coprocessor event to undo?
+ break;
+ case MODIFY_COLUMN_FAMILY_UPDATE_TABLE_DESCRIPTOR:
+ restoreTableDescriptor(env);
+ break;
+ case MODIFY_COLUMN_FAMILY_PRE_OPERATION:
+ // TODO-MAYBE: call the coprocessor event to undo?
+ break;
+ case MODIFY_COLUMN_FAMILY_PREPARE:
+ break; // nothing to do
+ default:
+ throw new UnsupportedOperationException(this + " unhandled state=" + state);
+ }
+ } catch (IOException e) {
+ // This will be retried. Unless there is a bug in the code,
+ // this should be just a "temporary error" (e.g. network down)
+ LOG.warn("Failed rollback attempt step " + state + " for adding the column family"
+ + getColumnFamilyName() + " to the table " + tableName, e);
+ throw e;
+ }
+ }
+
+ @Override
+ protected ModifyColumnFamilyState getState(final int stateId) {
+ return ModifyColumnFamilyState.valueOf(stateId);
+ }
+
+ @Override
+ protected int getStateId(final ModifyColumnFamilyState state) {
+ return state.getNumber();
+ }
+
+ @Override
+ protected ModifyColumnFamilyState getInitialState() {
+ return ModifyColumnFamilyState.MODIFY_COLUMN_FAMILY_PREPARE;
+ }
+
+ @Override
+ protected void setNextState(ModifyColumnFamilyState state) {
+ if (aborted.get()) {
+ setAbortFailure("modify-columnfamily", "abort requested");
+ } else {
+ super.setNextState(state);
+ }
+ }
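+
+ // Unlike DeleteColumnFamilyProcedure, every state of this procedure can be
+ // rolled back (there is no filesystem delete), so an abort request can be
+ // honored at any state transition without an isRollbackSupported() check.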
+
+ @Override
+ public boolean abort(final MasterProcedureEnv env) {
+ aborted.set(true);
+ return true;
+ }
+
+ @Override
+ protected boolean acquireLock(final MasterProcedureEnv env) {
+ if (!env.isInitialized()) return false;
+ return env.getProcedureQueue().tryAcquireTableWrite(
+ tableName,
+ EventType.C_M_MODIFY_FAMILY.toString());
+ }
+
+ @Override
+ protected void releaseLock(final MasterProcedureEnv env) {
+ env.getProcedureQueue().releaseTableWrite(tableName);
+ }
+
+ @Override
+ public void serializeStateData(final OutputStream stream) throws IOException {
+ super.serializeStateData(stream);
+
+ MasterProcedureProtos.ModifyColumnFamilyStateData.Builder modifyCFMsg =
+ MasterProcedureProtos.ModifyColumnFamilyStateData.newBuilder()
+ .setUserInfo(MasterProcedureUtil.toProtoUserInfo(user))
+ .setTableName(ProtobufUtil.toProtoTableName(tableName))
+ .setColumnfamilySchema(cfDescriptor.convert());
+ if (unmodifiedHTableDescriptor != null) {
+ modifyCFMsg.setUnmodifiedTableSchema(unmodifiedHTableDescriptor.convert());
+ }
+
+ modifyCFMsg.build().writeDelimitedTo(stream);
+ }
+
+ @Override
+ public void deserializeStateData(final InputStream stream) throws IOException {
+ super.deserializeStateData(stream);
+
+ MasterProcedureProtos.ModifyColumnFamilyStateData modifyCFMsg =
+ MasterProcedureProtos.ModifyColumnFamilyStateData.parseDelimitedFrom(stream);
+ user = MasterProcedureUtil.toUserInfo(modifyCFMsg.getUserInfo());
+ tableName = ProtobufUtil.toTableName(modifyCFMsg.getTableName());
+ cfDescriptor = HColumnDescriptor.convert(modifyCFMsg.getColumnfamilySchema());
+ if (modifyCFMsg.hasUnmodifiedTableSchema()) {
+ unmodifiedHTableDescriptor = HTableDescriptor.convert(modifyCFMsg.getUnmodifiedTableSchema());
+ }
+ }
+
+ @Override
+ public void toStringClassDetails(StringBuilder sb) {
+ sb.append(getClass().getSimpleName());
+ sb.append(" (table=");
+ sb.append(tableName);
+ sb.append(", columnfamily=");
+ if (cfDescriptor != null) {
+ sb.append(getColumnFamilyName());
+ } else {
+ sb.append("Unknown");
+ }
+ sb.append(") user=");
+ sb.append(user);
+ }
+
+ @Override
+ public TableName getTableName() {
+ return tableName;
+ }
+
+ @Override
+ public TableOperationType getTableOperationType() {
+ return TableOperationType.EDIT;
+ }
+
+ /**
+ * Action taken before any real work of modifying the column family begins.
+ * @param env MasterProcedureEnv
+ * @throws IOException
+ */
+ private void prepareModify(final MasterProcedureEnv env) throws IOException {
+ // Checks whether the table is allowed to be modified.
+ MasterDDLOperationHelper.checkTableModifiable(env, tableName);
+
+ unmodifiedHTableDescriptor = env.getMasterServices().getTableDescriptors().get(tableName);
+ if (unmodifiedHTableDescriptor == null) {
+ throw new IOException("HTableDescriptor missing for " + tableName);
+ }
+ if (!unmodifiedHTableDescriptor.hasFamily(cfDescriptor.getName())) {
+ throw new InvalidFamilyOperationException("Family '" + getColumnFamilyName()
+ + "' does not exist, so it cannot be modified");
+ }
+ }
+
+ /**
+ * Action before modifying column family.
+ * @param env MasterProcedureEnv
+ * @param state the procedure state
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ private void preModify(final MasterProcedureEnv env, final ModifyColumnFamilyState state)
+ throws IOException, InterruptedException {
+ runCoprocessorAction(env, state);
+ }
+
+ /**
+ * Update the table descriptor with the modified column family.
+ */
+ private void updateTableDescriptor(final MasterProcedureEnv env) throws IOException {
+ // Update table descriptor
+ LOG.info("ModifyColumnFamily. Table = " + tableName + " HCD = " + cfDescriptor.toString());
+
+ HTableDescriptor htd = env.getMasterServices().getTableDescriptors().get(tableName);
+ htd.modifyFamily(cfDescriptor);
+ env.getMasterServices().getTableDescriptors().add(htd);
+ }
+
+ /**
+ * Restore the old table descriptor.
+ * @param env MasterProcedureEnv
+ * @throws IOException
+ **/
+ private void restoreTableDescriptor(final MasterProcedureEnv env) throws IOException {
+ env.getMasterServices().getTableDescriptors().add(unmodifiedHTableDescriptor);
+
+ // Make sure regions are opened after table descriptor is updated.
+ reOpenAllRegionsIfTableIsOnline(env);
+ }
+
+ /**
+ * Action after modifying column family.
+ * @param env MasterProcedureEnv
+ * @param state the procedure state
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ private void postModify(final MasterProcedureEnv env, final ModifyColumnFamilyState state)
+ throws IOException, InterruptedException {
+ runCoprocessorAction(env, state);
+ }
+
+ /**
+ * Last action of the procedure, executed when online schema change is supported.
+ * @param env MasterProcedureEnv
+ * @throws IOException
+ */
+ private void reOpenAllRegionsIfTableIsOnline(final MasterProcedureEnv env) throws IOException {
+ // This operation only runs when the table is enabled.
+ if (!env.getMasterServices().getAssignmentManager().getTableStateManager()
+ .isTableState(getTableName(), TableState.State.ENABLED)) {
+ return;
+ }
+
+ List<HRegionInfo> regionInfoList = ProcedureSyncWait.getRegionsFromMeta(env, getTableName());
+ if (MasterDDLOperationHelper.reOpenAllRegions(env, getTableName(), regionInfoList)) {
+ LOG.info("Completed add column family operation on table " + getTableName());
+ } else {
+ LOG.warn("Error on reopening the regions on table " + getTableName());
+ }
+ }
+
+ /**
+ * The procedure could be restarted from a different machine; in that case the
+ * cached flag is null and has to be looked up again.
+ * @return traceEnabled
+ */
+ private Boolean isTraceEnabled() {
+ if (traceEnabled == null) {
+ traceEnabled = LOG.isTraceEnabled();
+ }
+ return traceEnabled;
+ }
+
+ private String getColumnFamilyName() {
+ return cfDescriptor.getNameAsString();
+ }
+
+ /**
+ * Coprocessor Action.
+ * @param env MasterProcedureEnv
+ * @param state the procedure state
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ private void runCoprocessorAction(final MasterProcedureEnv env,
+ final ModifyColumnFamilyState state) throws IOException, InterruptedException {
+ final MasterCoprocessorHost cpHost = env.getMasterCoprocessorHost();
+ if (cpHost != null) {
+ user.doAs(new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ switch (state) {
+ case MODIFY_COLUMN_FAMILY_PRE_OPERATION:
+ cpHost.preModifyColumnHandler(tableName, cfDescriptor);
+ break;
+ case MODIFY_COLUMN_FAMILY_POST_OPERATION:
+ cpHost.postModifyColumnHandler(tableName, cfDescriptor);
+ break;
+ default:
+ throw new UnsupportedOperationException(this + " unhandled state=" + state);
+ }
+ return null;
+ }
+ });
+ }
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
index a19a97566e0..44b98033070 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestTableLockManager.java
@@ -103,37 +103,6 @@ public class TestTableLockManager {
TEST_UTIL.shutdownMiniCluster();
}
- @Test(timeout = 600000)
- public void testLockTimeoutException() throws Exception {
- Configuration conf = TEST_UTIL.getConfiguration();
- conf.setInt(TableLockManager.TABLE_WRITE_LOCK_TIMEOUT_MS, 3000);
- prepareMiniCluster();
- HMaster master = TEST_UTIL.getHBaseCluster().getMaster();
- master.getMasterCoprocessorHost().load(TestLockTimeoutExceptionMasterObserver.class,
- 0, TEST_UTIL.getConfiguration());
-
- ExecutorService executor = Executors.newSingleThreadExecutor();
- Future