HBASE-7571 add the notion of per-table or per-column family configuration (Sergey)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1438527 13f79535-47bb-0310-9956-ffa450edef68
Zhihong Yu 2013-01-25 14:11:45 +00:00
parent a650c87f1f
commit f95692ce65
13 changed files with 983 additions and 123 deletions

View File

@@ -454,6 +454,7 @@ public final class HConstants {
public static final String VERSIONS = "VERSIONS";
public static final String IN_MEMORY = "IN_MEMORY";
public static final String METADATA = "METADATA";
public static final String CONFIGURATION = "CONFIGURATION";
/**
* This is a retry backoff multiplier table similar to the BSD TCP syn

View File

@@ -199,6 +199,16 @@ public final class HBaseProtos {
getColumnFamiliesOrBuilderList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(
int index);
// repeated .NameStringPair configuration = 4;
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>
getConfigurationList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index);
int getConfigurationCount();
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index);
}
public static final class TableSchema extends
com.google.protobuf.GeneratedMessage
@@ -281,10 +291,32 @@ public final class HBaseProtos {
return columnFamilies_.get(index);
}
// repeated .NameStringPair configuration = 4;
public static final int CONFIGURATION_FIELD_NUMBER = 4;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
return configuration_;
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList() {
return configuration_;
}
public int getConfigurationCount() {
return configuration_.size();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
return configuration_.get(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index) {
return configuration_.get(index);
}
private void initFields() {
name_ = com.google.protobuf.ByteString.EMPTY;
attributes_ = java.util.Collections.emptyList();
columnFamilies_ = java.util.Collections.emptyList();
configuration_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -303,6 +335,12 @@ public final class HBaseProtos {
return false;
}
}
for (int i = 0; i < getConfigurationCount(); i++) {
if (!getConfiguration(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@@ -319,6 +357,9 @@ public final class HBaseProtos {
for (int i = 0; i < columnFamilies_.size(); i++) {
output.writeMessage(3, columnFamilies_.get(i));
}
for (int i = 0; i < configuration_.size(); i++) {
output.writeMessage(4, configuration_.get(i));
}
getUnknownFields().writeTo(output);
}
@@ -340,6 +381,10 @@ public final class HBaseProtos {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, columnFamilies_.get(i));
}
for (int i = 0; i < configuration_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, configuration_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
@@ -372,6 +417,8 @@ public final class HBaseProtos {
.equals(other.getAttributesList());
result = result && getColumnFamiliesList()
.equals(other.getColumnFamiliesList());
result = result && getConfigurationList()
.equals(other.getConfigurationList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
@@ -393,6 +440,10 @@ public final class HBaseProtos {
hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER;
hash = (53 * hash) + getColumnFamiliesList().hashCode();
}
if (getConfigurationCount() > 0) {
hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER;
hash = (53 * hash) + getConfigurationList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
return hash;
}
@@ -503,6 +554,7 @@ public final class HBaseProtos {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getAttributesFieldBuilder();
getColumnFamiliesFieldBuilder();
getConfigurationFieldBuilder();
}
}
private static Builder create() {
@@ -525,6 +577,12 @@ public final class HBaseProtos {
} else {
columnFamiliesBuilder_.clear();
}
if (configurationBuilder_ == null) {
configuration_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
} else {
configurationBuilder_.clear();
}
return this;
}
@@ -585,6 +643,15 @@ public final class HBaseProtos {
} else {
result.columnFamilies_ = columnFamiliesBuilder_.build();
}
if (configurationBuilder_ == null) {
if (((bitField0_ & 0x00000008) == 0x00000008)) {
configuration_ = java.util.Collections.unmodifiableList(configuration_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.configuration_ = configuration_;
} else {
result.configuration_ = configurationBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
@@ -656,6 +723,32 @@ public final class HBaseProtos {
}
}
}
if (configurationBuilder_ == null) {
if (!other.configuration_.isEmpty()) {
if (configuration_.isEmpty()) {
configuration_ = other.configuration_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureConfigurationIsMutable();
configuration_.addAll(other.configuration_);
}
onChanged();
}
} else {
if (!other.configuration_.isEmpty()) {
if (configurationBuilder_.isEmpty()) {
configurationBuilder_.dispose();
configurationBuilder_ = null;
configuration_ = other.configuration_;
bitField0_ = (bitField0_ & ~0x00000008);
configurationBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getConfigurationFieldBuilder() : null;
} else {
configurationBuilder_.addAllMessages(other.configuration_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
@@ -673,6 +766,12 @@ public final class HBaseProtos {
return false;
}
}
for (int i = 0; i < getConfigurationCount(); i++) {
if (!getConfiguration(i).isInitialized()) {
return false;
}
}
return true;
}
@@ -716,6 +815,12 @@ public final class HBaseProtos {
addColumnFamilies(subBuilder.buildPartial());
break;
}
case 34: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addConfiguration(subBuilder.buildPartial());
break;
}
}
}
}
@@ -1118,6 +1223,192 @@ public final class HBaseProtos {
return columnFamiliesBuilder_;
}
// repeated .NameStringPair configuration = 4;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ =
java.util.Collections.emptyList();
private void ensureConfigurationIsMutable() {
if (!((bitField0_ & 0x00000008) == 0x00000008)) {
configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_);
bitField0_ |= 0x00000008;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
if (configurationBuilder_ == null) {
return java.util.Collections.unmodifiableList(configuration_);
} else {
return configurationBuilder_.getMessageList();
}
}
public int getConfigurationCount() {
if (configurationBuilder_ == null) {
return configuration_.size();
} else {
return configurationBuilder_.getCount();
}
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
if (configurationBuilder_ == null) {
return configuration_.get(index);
} else {
return configurationBuilder_.getMessage(index);
}
}
public Builder setConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.set(index, value);
onChanged();
} else {
configurationBuilder_.setMessage(index, value);
}
return this;
}
public Builder setConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.set(index, builderForValue.build());
onChanged();
} else {
configurationBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.add(value);
onChanged();
} else {
configurationBuilder_.addMessage(value);
}
return this;
}
public Builder addConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.add(index, value);
onChanged();
} else {
configurationBuilder_.addMessage(index, value);
}
return this;
}
public Builder addConfiguration(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.add(builderForValue.build());
onChanged();
} else {
configurationBuilder_.addMessage(builderForValue.build());
}
return this;
}
public Builder addConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.add(index, builderForValue.build());
onChanged();
} else {
configurationBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
public Builder addAllConfiguration(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
super.addAll(values, configuration_);
onChanged();
} else {
configurationBuilder_.addAllMessages(values);
}
return this;
}
public Builder clearConfiguration() {
if (configurationBuilder_ == null) {
configuration_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
} else {
configurationBuilder_.clear();
}
return this;
}
public Builder removeConfiguration(int index) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.remove(index);
onChanged();
} else {
configurationBuilder_.remove(index);
}
return this;
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder(
int index) {
return getConfigurationFieldBuilder().getBuilder(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index) {
if (configurationBuilder_ == null) {
return configuration_.get(index); } else {
return configurationBuilder_.getMessageOrBuilder(index);
}
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList() {
if (configurationBuilder_ != null) {
return configurationBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(configuration_);
}
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() {
return getConfigurationFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder(
int index) {
return getConfigurationFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
}
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder>
getConfigurationBuilderList() {
return getConfigurationFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationFieldBuilder() {
if (configurationBuilder_ == null) {
configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>(
configuration_,
((bitField0_ & 0x00000008) == 0x00000008),
getParentForChildren(),
isClean());
configuration_ = null;
}
return configurationBuilder_;
}
// @@protoc_insertion_point(builder_scope:TableSchema)
}
@@ -1145,6 +1436,16 @@ public final class HBaseProtos {
getAttributesOrBuilderList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder(
int index);
// repeated .NameStringPair configuration = 3;
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>
getConfigurationList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index);
int getConfigurationCount();
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index);
}
public static final class ColumnFamilySchema extends
com.google.protobuf.GeneratedMessage
@@ -1206,9 +1507,31 @@ public final class HBaseProtos {
return attributes_.get(index);
}
// repeated .NameStringPair configuration = 3;
public static final int CONFIGURATION_FIELD_NUMBER = 3;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
return configuration_;
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList() {
return configuration_;
}
public int getConfigurationCount() {
return configuration_.size();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
return configuration_.get(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index) {
return configuration_.get(index);
}
private void initFields() {
name_ = com.google.protobuf.ByteString.EMPTY;
attributes_ = java.util.Collections.emptyList();
configuration_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -1225,6 +1548,12 @@ public final class HBaseProtos {
return false;
}
}
for (int i = 0; i < getConfigurationCount(); i++) {
if (!getConfiguration(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@@ -1238,6 +1567,9 @@ public final class HBaseProtos {
for (int i = 0; i < attributes_.size(); i++) {
output.writeMessage(2, attributes_.get(i));
}
for (int i = 0; i < configuration_.size(); i++) {
output.writeMessage(3, configuration_.get(i));
}
getUnknownFields().writeTo(output);
}
@@ -1255,6 +1587,10 @@ public final class HBaseProtos {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, attributes_.get(i));
}
for (int i = 0; i < configuration_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, configuration_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
@@ -1285,6 +1621,8 @@ public final class HBaseProtos {
}
result = result && getAttributesList()
.equals(other.getAttributesList());
result = result && getConfigurationList()
.equals(other.getConfigurationList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
@@ -1302,6 +1640,10 @@ public final class HBaseProtos {
hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
hash = (53 * hash) + getAttributesList().hashCode();
}
if (getConfigurationCount() > 0) {
hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER;
hash = (53 * hash) + getConfigurationList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
return hash;
}
@@ -1411,6 +1753,7 @@ public final class HBaseProtos {
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getAttributesFieldBuilder();
getConfigurationFieldBuilder();
}
}
private static Builder create() {
@@ -1427,6 +1770,12 @@ public final class HBaseProtos {
} else {
attributesBuilder_.clear();
}
if (configurationBuilder_ == null) {
configuration_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
} else {
configurationBuilder_.clear();
}
return this;
}
@@ -1478,6 +1827,15 @@ public final class HBaseProtos {
} else {
result.attributes_ = attributesBuilder_.build();
}
if (configurationBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004)) {
configuration_ = java.util.Collections.unmodifiableList(configuration_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.configuration_ = configuration_;
} else {
result.configuration_ = configurationBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
@@ -1523,6 +1881,32 @@ public final class HBaseProtos {
}
}
}
if (configurationBuilder_ == null) {
if (!other.configuration_.isEmpty()) {
if (configuration_.isEmpty()) {
configuration_ = other.configuration_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureConfigurationIsMutable();
configuration_.addAll(other.configuration_);
}
onChanged();
}
} else {
if (!other.configuration_.isEmpty()) {
if (configurationBuilder_.isEmpty()) {
configurationBuilder_.dispose();
configurationBuilder_ = null;
configuration_ = other.configuration_;
bitField0_ = (bitField0_ & ~0x00000004);
configurationBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getConfigurationFieldBuilder() : null;
} else {
configurationBuilder_.addAllMessages(other.configuration_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
@@ -1538,6 +1922,12 @@ public final class HBaseProtos {
return false;
}
}
for (int i = 0; i < getConfigurationCount(); i++) {
if (!getConfiguration(i).isInitialized()) {
return false;
}
}
return true;
}
@@ -1575,6 +1965,12 @@ public final class HBaseProtos {
addAttributes(subBuilder.buildPartial());
break;
}
case 26: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addConfiguration(subBuilder.buildPartial());
break;
}
}
}
}
@@ -1791,6 +2187,192 @@ public final class HBaseProtos {
return attributesBuilder_;
}
// repeated .NameStringPair configuration = 3;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ =
java.util.Collections.emptyList();
private void ensureConfigurationIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_);
bitField0_ |= 0x00000004;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
if (configurationBuilder_ == null) {
return java.util.Collections.unmodifiableList(configuration_);
} else {
return configurationBuilder_.getMessageList();
}
}
public int getConfigurationCount() {
if (configurationBuilder_ == null) {
return configuration_.size();
} else {
return configurationBuilder_.getCount();
}
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
if (configurationBuilder_ == null) {
return configuration_.get(index);
} else {
return configurationBuilder_.getMessage(index);
}
}
public Builder setConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.set(index, value);
onChanged();
} else {
configurationBuilder_.setMessage(index, value);
}
return this;
}
public Builder setConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.set(index, builderForValue.build());
onChanged();
} else {
configurationBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.add(value);
onChanged();
} else {
configurationBuilder_.addMessage(value);
}
return this;
}
public Builder addConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.add(index, value);
onChanged();
} else {
configurationBuilder_.addMessage(index, value);
}
return this;
}
public Builder addConfiguration(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.add(builderForValue.build());
onChanged();
} else {
configurationBuilder_.addMessage(builderForValue.build());
}
return this;
}
public Builder addConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.add(index, builderForValue.build());
onChanged();
} else {
configurationBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
public Builder addAllConfiguration(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
super.addAll(values, configuration_);
onChanged();
} else {
configurationBuilder_.addAllMessages(values);
}
return this;
}
public Builder clearConfiguration() {
if (configurationBuilder_ == null) {
configuration_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
configurationBuilder_.clear();
}
return this;
}
public Builder removeConfiguration(int index) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.remove(index);
onChanged();
} else {
configurationBuilder_.remove(index);
}
return this;
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder(
int index) {
return getConfigurationFieldBuilder().getBuilder(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index) {
if (configurationBuilder_ == null) {
return configuration_.get(index); } else {
return configurationBuilder_.getMessageOrBuilder(index);
}
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList() {
if (configurationBuilder_ != null) {
return configurationBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(configuration_);
}
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() {
return getConfigurationFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder(
int index) {
return getConfigurationFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
}
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder>
getConfigurationBuilderList() {
return getConfigurationFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationFieldBuilder() {
if (configurationBuilder_ == null) {
configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>(
configuration_,
((bitField0_ & 0x00000004) == 0x00000004),
getParentForChildren(),
isClean());
configuration_ = null;
}
return configurationBuilder_;
}
// @@protoc_insertion_point(builder_scope:ColumnFamilySchema)
}
@@ -11158,56 +11740,58 @@ public final class HBaseProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\013hbase.proto\"m\n\013TableSchema\022\014\n\004name\030\001 \001" + "\n\013hbase.proto\"\225\001\n\013TableSchema\022\014\n\004name\030\001 " +
"(\014\022#\n\nattributes\030\002 \003(\0132\017.BytesBytesPair\022" + "\001(\014\022#\n\nattributes\030\002 \003(\0132\017.BytesBytesPair" +
"+\n\016columnFamilies\030\003 \003(\0132\023.ColumnFamilySc" + "\022+\n\016columnFamilies\030\003 \003(\0132\023.ColumnFamilyS" +
"hema\"G\n\022ColumnFamilySchema\022\014\n\004name\030\001 \002(\014" + "chema\022&\n\rconfiguration\030\004 \003(\0132\017.NameStrin" +
"\022#\n\nattributes\030\002 \003(\0132\017.BytesBytesPair\"s\n" + "gPair\"o\n\022ColumnFamilySchema\022\014\n\004name\030\001 \002(" +
"\nRegionInfo\022\020\n\010regionId\030\001 \002(\004\022\021\n\ttableNa" + "\014\022#\n\nattributes\030\002 \003(\0132\017.BytesBytesPair\022&" +
"me\030\002 \002(\014\022\020\n\010startKey\030\003 \001(\014\022\016\n\006endKey\030\004 \001" + "\n\rconfiguration\030\003 \003(\0132\017.NameStringPair\"s" +
"(\014\022\017\n\007offline\030\005 \001(\010\022\r\n\005split\030\006 \001(\010\"\225\001\n\017R" + "\n\nRegionInfo\022\020\n\010regionId\030\001 \002(\004\022\021\n\ttableN" +
"egionSpecifier\0222\n\004type\030\001 \002(\0162$.RegionSpe" + "ame\030\002 \002(\014\022\020\n\010startKey\030\003 \001(\014\022\016\n\006endKey\030\004 " +
"cifier.RegionSpecifierType\022\r\n\005value\030\002 \002(", "\001(\014\022\017\n\007offline\030\005 \001(\010\022\r\n\005split\030\006 \001(\010\"\225\001\n\017",
"\014\"?\n\023RegionSpecifierType\022\017\n\013REGION_NAME\020" + "RegionSpecifier\0222\n\004type\030\001 \002(\0162$.RegionSp" +
"\001\022\027\n\023ENCODED_REGION_NAME\020\002\"\260\003\n\nRegionLoa" + "ecifier.RegionSpecifierType\022\r\n\005value\030\002 \002" +
"d\022)\n\017regionSpecifier\030\001 \002(\0132\020.RegionSpeci" + "(\014\"?\n\023RegionSpecifierType\022\017\n\013REGION_NAME" +
"fier\022\016\n\006stores\030\002 \001(\r\022\022\n\nstorefiles\030\003 \001(\r" + "\020\001\022\027\n\023ENCODED_REGION_NAME\020\002\"\260\003\n\nRegionLo" +
"\022\037\n\027storeUncompressedSizeMB\030\004 \001(\r\022\027\n\017sto" + "ad\022)\n\017regionSpecifier\030\001 \002(\0132\020.RegionSpec" +
"refileSizeMB\030\005 \001(\r\022\026\n\016memstoreSizeMB\030\006 \001" + "ifier\022\016\n\006stores\030\002 \001(\r\022\022\n\nstorefiles\030\003 \001(" +
"(\r\022\034\n\024storefileIndexSizeMB\030\007 \001(\r\022\031\n\021read" + "\r\022\037\n\027storeUncompressedSizeMB\030\004 \001(\r\022\027\n\017st" +
"RequestsCount\030\010 \001(\004\022\032\n\022writeRequestsCoun" + "orefileSizeMB\030\005 \001(\r\022\026\n\016memstoreSizeMB\030\006 " +
"t\030\t \001(\004\022\032\n\022totalCompactingKVs\030\n \001(\004\022\033\n\023c" + "\001(\r\022\034\n\024storefileIndexSizeMB\030\007 \001(\r\022\031\n\021rea" +
"urrentCompactedKVs\030\013 \001(\004\022\027\n\017rootIndexSiz", "dRequestsCount\030\010 \001(\004\022\032\n\022writeRequestsCou",
"eKB\030\014 \001(\r\022\036\n\026totalStaticIndexSizeKB\030\r \001(" + "nt\030\t \001(\004\022\032\n\022totalCompactingKVs\030\n \001(\004\022\033\n\023" +
"\r\022\036\n\026totalStaticBloomSizeKB\030\016 \001(\r\022\032\n\022com" + "currentCompactedKVs\030\013 \001(\004\022\027\n\017rootIndexSi" +
"pleteSequenceId\030\017 \001(\004\"\372\001\n\nServerLoad\022\030\n\020" + "zeKB\030\014 \001(\r\022\036\n\026totalStaticIndexSizeKB\030\r \001" +
"numberOfRequests\030\001 \001(\r\022\035\n\025totalNumberOfR" + "(\r\022\036\n\026totalStaticBloomSizeKB\030\016 \001(\r\022\032\n\022co" +
"equests\030\002 \001(\r\022\022\n\nusedHeapMB\030\003 \001(\r\022\021\n\tmax" + "mpleteSequenceId\030\017 \001(\004\"\372\001\n\nServerLoad\022\030\n" +
"HeapMB\030\004 \001(\r\022 \n\013regionLoads\030\005 \003(\0132\013.Regi" + "\020numberOfRequests\030\001 \001(\r\022\035\n\025totalNumberOf" +
"onLoad\022\"\n\014coprocessors\030\006 \003(\0132\014.Coprocess" + "Requests\030\002 \001(\r\022\022\n\nusedHeapMB\030\003 \001(\r\022\021\n\tma" +
"or\022\027\n\017reportStartTime\030\007 \001(\004\022\025\n\rreportEnd" + "xHeapMB\030\004 \001(\r\022 \n\013regionLoads\030\005 \003(\0132\013.Reg" +
"Time\030\010 \001(\004\022\026\n\016infoServerPort\030\t \001(\r\"%\n\tTi" + "ionLoad\022\"\n\014coprocessors\030\006 \003(\0132\014.Coproces" +
"meRange\022\014\n\004from\030\001 \001(\004\022\n\n\002to\030\002 \001(\004\"0\n\006Fil", "sor\022\027\n\017reportStartTime\030\007 \001(\004\022\025\n\rreportEn",
"ter\022\014\n\004name\030\001 \002(\t\022\030\n\020serializedFilter\030\002 " + "dTime\030\010 \001(\004\022\026\n\016infoServerPort\030\t \001(\r\"%\n\tT" +
"\001(\014\"w\n\010KeyValue\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002" + "imeRange\022\014\n\004from\030\001 \001(\004\022\n\n\002to\030\002 \001(\004\"0\n\006Fi" +
" \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttimestamp\030\004 \001" + "lter\022\014\n\004name\030\001 \002(\t\022\030\n\020serializedFilter\030\002" +
"(\004\022\031\n\007keyType\030\005 \001(\0162\010.KeyType\022\r\n\005value\030\006" + " \001(\014\"w\n\010KeyValue\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030" +
" \001(\014\"?\n\nServerName\022\020\n\010hostName\030\001 \002(\t\022\014\n\004" + "\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttimestamp\030\004 " +
"port\030\002 \001(\r\022\021\n\tstartCode\030\003 \001(\004\"\033\n\013Coproce" + "\001(\004\022\031\n\007keyType\030\005 \001(\0162\010.KeyType\022\r\n\005value\030" +
"ssor\022\014\n\004name\030\001 \002(\t\"-\n\016NameStringPair\022\014\n\004" + "\006 \001(\014\"?\n\nServerName\022\020\n\010hostName\030\001 \002(\t\022\014\n" +
"name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\",\n\rNameBytesPa" + "\004port\030\002 \001(\r\022\021\n\tstartCode\030\003 \001(\004\"\033\n\013Coproc" +
"ir\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014\"/\n\016Bytes" + "essor\022\014\n\004name\030\001 \002(\t\"-\n\016NameStringPair\022\014\n" +
"BytesPair\022\r\n\005first\030\001 \002(\014\022\016\n\006second\030\002 \002(\014", "\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\",\n\rNameBytesP",
"\",\n\rNameInt64Pair\022\014\n\004name\030\001 \001(\t\022\r\n\005value" + "air\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014\"/\n\016Byte" +
"\030\002 \001(\003\"\n\n\010EmptyMsg\"\032\n\007LongMsg\022\017\n\007longMsg" + "sBytesPair\022\r\n\005first\030\001 \002(\014\022\016\n\006second\030\002 \002(" +
"\030\001 \002(\003*r\n\013CompareType\022\010\n\004LESS\020\000\022\021\n\rLESS_" + "\014\",\n\rNameInt64Pair\022\014\n\004name\030\001 \001(\t\022\r\n\005valu" +
"OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020" + "e\030\002 \001(\003\"\n\n\010EmptyMsg\"\032\n\007LongMsg\022\017\n\007longMs" +
"GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n\005NO_OP" + "g\030\001 \002(\003*r\n\013CompareType\022\010\n\004LESS\020\000\022\021\n\rLESS" +
"\020\006*_\n\007KeyType\022\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006D" + "_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL\020\003\022\024\n" +
"ELETE\020\010\022\021\n\rDELETE_COLUMN\020\014\022\021\n\rDELETE_FAM" + "\020GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n\005NO_O" +
"ILY\020\016\022\014\n\007MAXIMUM\020\377\001B>\n*org.apache.hadoop" + "P\020\006*_\n\007KeyType\022\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006" +
".hbase.protobuf.generatedB\013HBaseProtosH\001" + "DELETE\020\010\022\021\n\rDELETE_COLUMN\020\014\022\021\n\rDELETE_FA" +
"\240\001\001" "MILY\020\016\022\014\n\007MAXIMUM\020\377\001B>\n*org.apache.hadoo",
"p.hbase.protobuf.generatedB\013HBaseProtosH" +
"\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -11219,7 +11803,7 @@ public final class HBaseProtos {
internal_static_TableSchema_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_TableSchema_descriptor,
new java.lang.String[] { "Name", "Attributes", "ColumnFamilies", "Configuration", },
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class,
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class);
internal_static_ColumnFamilySchema_descriptor =
@@ -11227,7 +11811,7 @@ public final class HBaseProtos {
internal_static_ColumnFamilySchema_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ColumnFamilySchema_descriptor,
new java.lang.String[] { "Name", "Attributes", "Configuration", },
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class,
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class);
internal_static_RegionInfo_descriptor =

View File

@@ -31,6 +31,7 @@ message TableSchema {
optional bytes name = 1;
repeated BytesBytesPair attributes = 2;
repeated ColumnFamilySchema columnFamilies = 3;
repeated NameStringPair configuration = 4;
}
/**
@@ -40,6 +41,7 @@ message TableSchema {
message ColumnFamilySchema {
required bytes name = 1;
repeated BytesBytesPair attributes = 2;
repeated NameStringPair configuration = 3;
}
/**

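For context, a minimal sketch (not part of this commit) of how the new repeated configuration field can be populated through the generated builder API; the config key shown is only an illustrative choice:

import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import com.google.protobuf.ByteString;

// Build a ColumnFamilySchema carrying one per-family override (field 3 above).
NameStringPair pair = NameStringPair.newBuilder()
    .setName("hbase.hstore.blockingStoreFiles")  // illustrative key; any config key works
    .setValue("20")
    .build();
ColumnFamilySchema cfs = ColumnFamilySchema.newBuilder()
    .setName(ByteString.copyFromUtf8("cf"))
    .addConfiguration(pair)
    .build();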
View File

@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
@@ -67,7 +68,8 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
// Version 8 -- reintroduction of bloom filters, changed from boolean to enum
// Version 9 -- add data block encoding
// Version 10 -- change metadata to standard type.
// Version 11 -- add column family level configuration.
private static final byte COLUMN_DESCRIPTOR_VERSION = (byte) 11;
// These constants are used as FileInfo keys
public static final String COMPRESSION = "COMPRESSION";
@@ -221,9 +223,16 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
private byte [] name;
// Column metadata
private final Map<ImmutableBytesWritable, ImmutableBytesWritable> values =
new HashMap<ImmutableBytesWritable,ImmutableBytesWritable>();
/**
* A map which holds the configuration specific to the column family.
* The keys of the map have the same names as config keys and override the defaults with
* cf-specific settings. Example usage may be for compactions, etc.
*/
private final Map<String, String> configuration = new HashMap<String, String>();
/*
* Cache the max versions rather than calculate it every time.
*/
@@ -278,6 +287,9 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
desc.values.entrySet()) {
this.values.put(e.getKey(), e.getValue());
}
for (Map.Entry<String, String> e : desc.configuration.entrySet()) {
this.configuration.put(e.getKey(), e.getValue());
}
setMaxVersions(desc.getMaxVersions());
}
@@ -950,6 +962,21 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
}
s.append('}');
}
if (!configuration.isEmpty()) {
s.append(", ");
s.append(HConstants.CONFIGURATION).append(" => ");
s.append('{');
boolean printCommaForConfiguration = false;
for (Map.Entry<String, String> e : configuration.entrySet()) {
if (printCommaForConfiguration) s.append(", ");
printCommaForConfiguration = true;
s.append('\'').append(e.getKey()).append('\'');
s.append(" => ");
s.append('\'').append(e.getValue()).append('\'');
}
s.append("}");
}
return s;
}
@@ -982,6 +1009,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
int result = Bytes.hashCode(this.name);
result ^= Byte.valueOf(COLUMN_DESCRIPTOR_VERSION).hashCode();
result ^= values.hashCode();
result ^= configuration.hashCode();
return result;
}
@@ -1052,6 +1080,19 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
String value = getValue(HConstants.VERSIONS);
this.cachedMaxVersions = (value != null)?
Integer.valueOf(value).intValue(): DEFAULT_VERSIONS;
if (version > 10) {
configuration.clear();
int numConfigs = in.readInt();
for (int i = 0; i < numConfigs; i++) {
ImmutableBytesWritable key = new ImmutableBytesWritable();
ImmutableBytesWritable val = new ImmutableBytesWritable();
key.readFields(in);
val.readFields(in);
configuration.put(
Bytes.toString(key.get(), key.getOffset(), key.getLength()),
Bytes.toString(val.get(), val.getOffset(), val.getLength()));
}
}
}
}
@@ -1068,6 +1109,11 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
e.getKey().write(out);
e.getValue().write(out);
}
out.writeInt(configuration.size());
for (Map.Entry<String, String> e : configuration.entrySet()) {
new ImmutableBytesWritable(Bytes.toBytes(e.getKey())).write(out);
new ImmutableBytesWritable(Bytes.toBytes(e.getValue())).write(out);
}
}
// Comparable
@@ -1082,6 +1128,13 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
else if (result > 0)
result = 1;
}
if (result == 0) {
result = this.configuration.hashCode() - o.configuration.hashCode();
if (result < 0)
result = -1;
else if (result > 0)
result = 1;
}
return result;
}
@@ -1125,6 +1178,9 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
for (BytesBytesPair a: cfs.getAttributesList()) {
hcd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
}
for (NameStringPair a: cfs.getConfigurationList()) {
hcd.setConfiguration(a.getName(), a.getValue());
}
return hcd;
}
@@ -1140,6 +1196,47 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
aBuilder.setSecond(ByteString.copyFrom(e.getValue().get()));
builder.addAttributes(aBuilder.build());
}
for (Map.Entry<String, String> e : this.configuration.entrySet()) {
NameStringPair.Builder aBuilder = NameStringPair.newBuilder();
aBuilder.setName(e.getKey());
aBuilder.setValue(e.getValue());
builder.addConfiguration(aBuilder.build());
}
return builder.build();
}
/**
* Getter for accessing the configuration value by key.
*/
public String getConfigurationValue(String key) {
return configuration.get(key);
}
/**
* Getter for fetching an unmodifiable {@link #configuration} map.
*/
public Map<String, String> getConfiguration() {
// shallow pointer copy
return Collections.unmodifiableMap(configuration);
}
/**
* Setter for storing a configuration setting in {@link #configuration} map.
* @param key Config key. Same as XML config key e.g. hbase.something.or.other.
* @param value String value. If null, removes the configuration.
*/
public void setConfiguration(String key, String value) {
if (value == null) {
removeConfiguration(key);
} else {
configuration.put(key, value);
}
}
/**
* Remove a configuration setting represented by the key from the {@link #configuration} map.
*/
public void removeConfiguration(final String key) {
configuration.remove(key);
}
}

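A small usage sketch (illustrative, not taken from the patch) of the per-column-family accessors added above:

HColumnDescriptor hcd = new HColumnDescriptor("cf");
hcd.setConfiguration("hbase.hstore.compaction.min", "5"); // any config key; this one is illustrative
String v = hcd.getConfigurationValue("hbase.hstore.compaction.min"); // returns "5"
hcd.setConfiguration("hbase.hstore.compaction.min", null); // a null value removes the entry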
View File

@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
@@ -65,8 +66,9 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* Version 4 adds indexes
* Version 5 removed transactional pollution -- e.g. indexes
* Version 6 changed metadata to BytesBytesPair in PB
* Version 7 adds table-level configuration
*/
private static final byte TABLE_DESCRIPTOR_VERSION = 7;
private byte [] name = HConstants.EMPTY_BYTE_ARRAY;
@@ -77,9 +79,16 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* includes values like IS_ROOT, IS_META, DEFERRED_LOG_FLUSH, SPLIT_POLICY,
* MAX_FILE_SIZE, READONLY, MEMSTORE_FLUSHSIZE etc...
*/
private final Map<ImmutableBytesWritable, ImmutableBytesWritable> values =
new HashMap<ImmutableBytesWritable, ImmutableBytesWritable>();
/**
* A map which holds the configuration specific to the table.
* The keys of the map have the same names as config keys and override the defaults with
* table-specific settings. Example usage may be for compactions, etc.
*/
private final Map<String, String> configuration = new HashMap<String, String>();
public static final String SPLIT_POLICY = "SPLIT_POLICY";
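Assuming HTableDescriptor gains setConfiguration/getConfigurationValue accessors mirroring HColumnDescriptor's (the accessors themselves fall outside the lines shown here), a table-level override would be stored and rendered like this sketch:

HTableDescriptor htd = new HTableDescriptor("t1");
htd.setConfiguration("hbase.hregion.majorcompaction", "0"); // hypothetical per-table override
// toString() then prints a CONFIGURATION => {...} block alongside TABLE_ATTRIBUTES, e.g.
// 't1', {TABLE_ATTRIBUTES => {...}, CONFIGURATION => {'hbase.hregion.majorcompaction' => '0'}}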
/**
@@ -236,7 +245,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
}
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry:
values.entrySet()) {
setValue(entry.getKey(), entry.getValue());
}
}
@@ -295,7 +304,10 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
}
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
desc.values.entrySet()) {
setValue(e.getKey(), e.getValue());
}
for (Map.Entry<String, String> e : desc.configuration.entrySet()) {
this.configuration.put(e.getKey(), e.getValue());
}
}
@@ -333,7 +345,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
*/
protected void setRootRegion(boolean isRoot) {
// TODO: Make the value a boolean rather than String of boolean.
setValue(IS_ROOT_KEY, isRoot? TRUE: FALSE);
}
/**
@@ -374,7 +386,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* <code> .META. </code> region
*/
protected void setMetaRegion(boolean isMeta) {
setValue(IS_META_KEY, isMeta? TRUE: FALSE);
}
/**
@@ -487,7 +499,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* @see #values
*/
public void setValue(byte[] key, byte[] value) {
setValue(new ImmutableBytesWritable(key), new ImmutableBytesWritable(value));
}
/*
@@ -495,8 +507,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* @param value The value.
*/
private void setValue(final ImmutableBytesWritable key,
final String value) {
setValue(key, new ImmutableBytesWritable(Bytes.toBytes(value)));
}
/*
@@ -517,21 +529,11 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
*/
public void setValue(String key, String value) {
if (value == null) {
remove(key);
} else {
setValue(Bytes.toBytes(key), Bytes.toBytes(value));
}
}
/**
* Remove metadata represented by the key from the {@link #values} map
*
* @param key Key whose key and value we're to remove from HTableDescriptor
* parameters.
*/
public void remove(final byte [] key) {
values.remove(new ImmutableBytesWritable(key));
}
/**
* Remove metadata represented by the key from the {@link #values} map
@@ -540,7 +542,17 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* parameters.
*/
public void remove(final String key) {
remove(new ImmutableBytesWritable(Bytes.toBytes(key)));
}
/**
* Remove metadata represented by the key from the {@link #values} map
*
* @param key Key whose key and value we're to remove from HTableDescriptor
* parameters.
*/
public void remove(ImmutableBytesWritable key) {
values.remove(key);
}
/**
@@ -673,7 +685,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* before a split is triggered.
*/
public void setMaxFileSize(long maxFileSize) {
setValue(MAX_FILESIZE_KEY, Long.toString(maxFileSize));
}
/**
@@ -698,8 +710,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* @param memstoreFlushSize memory cache flush size for each hregion
*/
public void setMemStoreFlushSize(long memstoreFlushSize) {
setValue(MEMSTORE_FLUSHSIZE_KEY, Long.toString(memstoreFlushSize));
}
/**
@@ -757,13 +768,13 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
// step 1: set partitioning and pruning
Set<ImmutableBytesWritable> reservedKeys = new TreeSet<ImmutableBytesWritable>();
Set<ImmutableBytesWritable> userKeys = new TreeSet<ImmutableBytesWritable>();
for (ImmutableBytesWritable k : values.keySet()) {
if (k == null || k.get() == null) continue;
String key = Bytes.toString(k.get());
// in this section, print out reserved keywords + coprocessor info
if (!RESERVED_KEYWORDS.contains(k) && !key.startsWith("coprocessor$")) {
userKeys.add(k);
continue;
}
// only print out IS_ROOT/IS_META if true
@@ -780,50 +791,67 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
    }

    // early exit optimization
-    if (reservedKeys.isEmpty() && configKeys.isEmpty()) return s;
+    boolean hasAttributes = !reservedKeys.isEmpty() || !userKeys.isEmpty();
+    if (!hasAttributes && configuration.isEmpty()) return s;

-    // step 2: printing
-    s.append(", {TABLE_ATTRIBUTES => {");
+    s.append(", {");
+    // step 2: printing attributes
+    if (hasAttributes) {
+      s.append("TABLE_ATTRIBUTES => {");

-    // print all reserved keys first
-    boolean printCommaForAttr = false;
-    for (ImmutableBytesWritable k : reservedKeys) {
-      String key = Bytes.toString(k.get());
-      String value = Bytes.toString(values.get(k).get());
-      if (printCommaForAttr) s.append(", ");
-      printCommaForAttr = true;
-      s.append(key);
-      s.append(" => ");
-      s.append('\'').append(value).append('\'');
-    }
+      // print all reserved keys first
+      boolean printCommaForAttr = false;
+      for (ImmutableBytesWritable k : reservedKeys) {
+        String key = Bytes.toString(k.get());
+        String value = Bytes.toString(values.get(k).get());
+        if (printCommaForAttr) s.append(", ");
+        printCommaForAttr = true;
+        s.append(key);
+        s.append(" => ");
+        s.append('\'').append(value).append('\'');
+      }

-    if (!configKeys.isEmpty()) {
-      // print all non-reserved, advanced config keys as a separate subset
-      if (printCommaForAttr) s.append(", ");
-      printCommaForAttr = true;
-      s.append(HConstants.METADATA).append(" => ");
-      s.append("{");
-      boolean printCommaForCfg = false;
-      for (ImmutableBytesWritable k : configKeys) {
-        String key = Bytes.toString(k.get());
-        String value = Bytes.toString(values.get(k).get());
-        if (printCommaForCfg) s.append(", ");
-        printCommaForCfg = true;
-        s.append('\'').append(key).append('\'');
-        s.append(" => ");
-        s.append('\'').append(value).append('\'');
-      }
-      s.append("}");
-    }
+      if (!userKeys.isEmpty()) {
+        // print all non-reserved, advanced config keys as a separate subset
+        if (printCommaForAttr) s.append(", ");
+        printCommaForAttr = true;
+        s.append(HConstants.METADATA).append(" => ");
+        s.append("{");
+        boolean printCommaForCfg = false;
+        for (ImmutableBytesWritable k : userKeys) {
+          String key = Bytes.toString(k.get());
+          String value = Bytes.toString(values.get(k).get());
+          if (printCommaForCfg) s.append(", ");
+          printCommaForCfg = true;
+          s.append('\'').append(key).append('\'');
+          s.append(" => ");
+          s.append('\'').append(value).append('\'');
+        }
+        s.append("}");
+      }
+    }

-    s.append("}}"); // end METHOD
+    // step 3: printing all configuration:
+    if (!configuration.isEmpty()) {
+      if (hasAttributes) {
+        s.append(", ");
+      }
+      s.append(HConstants.CONFIGURATION).append(" => ");
+      s.append('{');
+      boolean printCommaForConfig = false;
+      for (Map.Entry<String, String> e : configuration.entrySet()) {
+        if (printCommaForConfig) s.append(", ");
+        printCommaForConfig = true;
+        s.append('\'').append(e.getKey()).append('\'');
+        s.append(" => ");
+        s.append('\'').append(e.getValue()).append('\'');
+      }
+      s.append("}");
+    }
+    s.append("}"); // end METHOD
    return s;
  }

+  public static Map<String, String> getDefaultValues() {
+    return Collections.unmodifiableMap(DEFAULT_VALUES);
+  }
+
  /**
   * Compare the contents of the descriptor with another one passed as a parameter.
   * Checks if the obj passed is an instance of HTableDescriptor, if yes then the
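
For orientation, and purely illustrative (the table name, attribute, and config values below are invented, not from the patch): with step 3 in place, a descriptor carrying a reserved attribute, user metadata, and per-table configuration renders roughly as

    'demo', {TABLE_ATTRIBUTES => {MAX_FILESIZE => '134217728', METADATA => {'owner' => 'alice'}}, CONFIGURATION => {'hbase.hstore.compaction.min' => '5'}}
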
@@ -860,6 +888,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
      }
    }
    result ^= values.hashCode();
+    result ^= configuration.hashCode();
    return result;
  }
@@ -880,13 +909,14 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
    setRootRegion(in.readBoolean());
    setMetaRegion(in.readBoolean());
    values.clear();
+    configuration.clear();
    int numVals = in.readInt();
    for (int i = 0; i < numVals; i++) {
      ImmutableBytesWritable key = new ImmutableBytesWritable();
      ImmutableBytesWritable value = new ImmutableBytesWritable();
      key.readFields(in);
      value.readFields(in);
-      values.put(key, value);
+      setValue(key, value);
    }
    families.clear();
    int numFamilies = in.readInt();
@@ -895,8 +925,17 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
      c.readFields(in);
      families.put(c.getName(), c);
    }
-    if (version < 4) {
-      return;
+    if (version >= 7) {
+      int numConfigs = in.readInt();
+      for (int i = 0; i < numConfigs; i++) {
+        ImmutableBytesWritable key = new ImmutableBytesWritable();
+        ImmutableBytesWritable value = new ImmutableBytesWritable();
+        key.readFields(in);
+        value.readFields(in);
+        configuration.put(
+          Bytes.toString(key.get(), key.getOffset(), key.getLength()),
+          Bytes.toString(value.get(), value.getOffset(), value.getLength()));
+      }
    }
  }
@@ -925,6 +964,11 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
      HColumnDescriptor family = it.next();
      family.write(out);
    }
+    out.writeInt(configuration.size());
+    for (Map.Entry<String, String> e : configuration.entrySet()) {
+      new ImmutableBytesWritable(Bytes.toBytes(e.getKey())).write(out);
+      new ImmutableBytesWritable(Bytes.toBytes(e.getValue())).write(out);
+    }
  }

  // Comparable
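
To make the wire behavior concrete, here is a minimal round-trip sketch, not part of the patch; it assumes the 0.95-era Writable plumbing and the descriptor-version bump to 7 that is implied by the read-side `version >= 7` check:

    // Sketch: the configuration map is written after the families and is only
    // read back when the serialized descriptor version is at least 7.
    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.hbase.HTableDescriptor;

    public class ConfigRoundTrip {
      public static void main(String[] args) throws IOException {
        HTableDescriptor src = new HTableDescriptor("t1");
        src.setConfiguration("hbase.hstore.blockingStoreFiles", "20");

        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        src.write(new DataOutputStream(buf));

        HTableDescriptor copy = new HTableDescriptor();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        // The string-keyed config survives; a pre-7 descriptor simply carries no such block.
        assert "20".equals(copy.getConfigurationValue("hbase.hstore.blockingStoreFiles"));
      }
    }
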
@@ -963,6 +1007,13 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
      else if (result > 0)
        result = 1;
    }
+    if (result == 0) {
+      result = this.configuration.hashCode() - other.configuration.hashCode();
+      if (result < 0)
+        result = -1;
+      else if (result > 0)
+        result = 1;
+    }
    return result;
  }
@@ -1170,7 +1221,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
    }
    // if we found a match, remove it
    if (match != null)
-      this.values.remove(match);
+      remove(match);
  }

  /**
@@ -1218,9 +1269,9 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
  @Deprecated
  public void setOwnerString(String ownerString) {
    if (ownerString != null) {
-      setValue(OWNER_KEY, Bytes.toBytes(ownerString));
+      setValue(OWNER_KEY, ownerString);
    } else {
-      values.remove(OWNER_KEY);
+      remove(OWNER_KEY);
    }
  }
@@ -1281,6 +1332,12 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
    for (HColumnDescriptor hcd: getColumnFamilies()) {
      builder.addColumnFamilies(hcd.convert());
    }
+    for (Map.Entry<String, String> e : this.configuration.entrySet()) {
+      NameStringPair.Builder aBuilder = NameStringPair.newBuilder();
+      aBuilder.setName(e.getKey());
+      aBuilder.setValue(e.getValue());
+      builder.addConfiguration(aBuilder.build());
+    }
    return builder.build();
  }
@@ -1299,6 +1356,44 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
    for (BytesBytesPair a: ts.getAttributesList()) {
      htd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
    }
+    for (NameStringPair a: ts.getConfigurationList()) {
+      htd.setConfiguration(a.getName(), a.getValue());
+    }
    return htd;
  }

+  /**
+   * Getter for accessing the configuration value by key
+   */
+  public String getConfigurationValue(String key) {
+    return configuration.get(key);
+  }
+
+  /**
+   * Getter for fetching an unmodifiable {@link #configuration} map.
+   */
+  public Map<String, String> getConfiguration() {
+    // shallow pointer copy
+    return Collections.unmodifiableMap(configuration);
+  }
+
+  /**
+   * Setter for storing a configuration setting in {@link #configuration} map.
+   * @param key Config key. Same as XML config key e.g. hbase.something.or.other.
+   * @param value String value. If null, removes the setting.
+   */
+  public void setConfiguration(String key, String value) {
+    if (value == null) {
+      removeConfiguration(key);
+    } else {
+      configuration.put(key, value);
+    }
+  }
+
+  /**
+   * Remove a config setting represented by the key from the {@link #configuration} map
+   */
+  public void removeConfiguration(final String key) {
+    configuration.remove(key);
+  }
 }
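
A short usage sketch for the new accessors, not from the patch (names invented); the protobuf lines assume the conversion pair in the two hunks above is exposed as convert()/convert(TableSchema), which the diff itself does not spell out:

    // Sketch only: exercises the string-keyed config map added above.
    static void demoConfigurationAccessors() {
      HTableDescriptor htd = new HTableDescriptor("demo");
      htd.setConfiguration("hbase.hstore.compaction.min", "5");   // per-table override
      assert "5".equals(htd.getConfigurationValue("hbase.hstore.compaction.min"));

      // Survives the TableSchema conversion via the repeated NameStringPair field.
      HTableDescriptor copy = HTableDescriptor.convert(htd.convert());
      assert "5".equals(copy.getConfigurationValue("hbase.hstore.compaction.min"));

      htd.setConfiguration("hbase.hstore.compaction.min", null);  // null removes the setting
      assert htd.getConfigurationValue("hbase.hstore.compaction.min") == null;
    }
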


@@ -133,7 +133,7 @@ public final class Constraints {
    }

    // now remove all the keys we found
    for (ImmutableBytesWritable key : keys) {
-      desc.remove(key.get());
+      desc.remove(key);
    }
  }


@@ -440,6 +440,7 @@ public class HRegion implements HeapSize { // , Writable{
    this.baseConf = confParam;
    this.conf = new CompoundConfiguration()
      .add(confParam)
+      .addStringMap(htd.getConfiguration())
      .addWritableMap(htd.getValues());
    this.rowLockWaitDuration = conf.getInt("hbase.rowlock.wait.duration",
        DEFAULT_ROWLOCK_WAIT_DURATION);


@@ -184,8 +184,12 @@ public class HStore implements Store, StoreConfiguration {
    this.region = region;
    this.family = family;
    // 'conf' renamed to 'confParam' b/c we use this.conf in the constructor
+    // CompoundConfiguration will look for keys in reverse order of addition, so we'd
+    // add global config first, then table and cf overrides, then cf metadata.
    this.conf = new CompoundConfiguration()
      .add(confParam)
+      .addStringMap(region.getTableDesc().getConfiguration())
+      .addStringMap(family.getConfiguration())
      .addWritableMap(family.getValues());
    this.blocksize = family.getBlocksize();
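
The comment above is the crux of the feature: CompoundConfiguration resolves a key against the most recently added source first, so a store-level lookup falls through column family config, then table config, then the site configuration. A toy sketch of that precedence, not from the patch (key and values invented; CompoundConfiguration's package location has moved between releases), which also covers the analogous HRegion hunk above:

    // Sketch only: last map added wins on lookup.
    static void demoPrecedence() {
      Configuration global = HBaseConfiguration.create();
      global.setLong("hbase.hstore.compaction.min", 3);           // site-wide default

      HTableDescriptor htd = new HTableDescriptor("demo");
      htd.setConfiguration("hbase.hstore.compaction.min", "4");   // per-table override

      HColumnDescriptor hcd = new HColumnDescriptor("cf");
      hcd.setConfiguration("hbase.hstore.compaction.min", "5");   // per-family override

      Configuration conf = new CompoundConfiguration()
          .add(global)                                            // added first, consulted last
          .addStringMap(htd.getConfiguration())
          .addStringMap(hcd.getConfiguration());                  // added last, consulted first
      assert conf.getInt("hbase.hstore.compaction.min", -1) == 5;
    }
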


@@ -56,6 +56,7 @@ module HBaseConstants
  SPLITS_FILE = 'SPLITS_FILE'
  SPLITALGO = 'SPLITALGO'
  NUMREGIONS = 'NUMREGIONS'
+  CONFIGURATION = org.apache.hadoop.hbase.HConstants::CONFIGURATION

  # Load constants from hbase java API
  def self.promote_constants(constants)


@@ -260,6 +260,7 @@ module Hbase
      htd.setMemStoreFlushSize(JLong.valueOf(arg.delete(MEMSTORE_FLUSHSIZE))) if arg[MEMSTORE_FLUSHSIZE]
      htd.setDeferredLogFlush(JBoolean.valueOf(arg.delete(DEFERRED_LOG_FLUSH))) if arg[DEFERRED_LOG_FLUSH]
      set_user_metadata(htd, arg.delete(METADATA)) if arg[METADATA]
+      set_descriptor_config(htd, arg.delete(CONFIGURATION)) if arg[CONFIGURATION]

      arg.each_key do |ignored_key|
        puts("An argument ignored (unknown or overridden): %s" % [ ignored_key ])
@@ -420,7 +421,7 @@ module Hbase
          if (htd.getValue(name) == nil)
            raise ArgumentError, "Can not find attribute: #{name}"
          end
-          htd.remove(name.to_java_bytes)
+          htd.remove(name)
          @admin.modifyTable(table_name.to_java_bytes, htd)
        # Unknown method
        else
@@ -446,11 +447,12 @@ module Hbase
          # 3) Some args for the table, optionally with METHOD => table_att (deprecated)
          raise(ArgumentError, "NAME argument in an unexpected place") if name
          htd.setOwnerString(arg.delete(OWNER)) if arg[OWNER]
-          set_user_metadata(htd, arg.delete(METADATA)) if arg[METADATA]
          htd.setMaxFileSize(JLong.valueOf(arg.delete(MAX_FILESIZE))) if arg[MAX_FILESIZE]
          htd.setReadOnly(JBoolean.valueOf(arg.delete(READONLY))) if arg[READONLY]
          htd.setMemStoreFlushSize(JLong.valueOf(arg.delete(MEMSTORE_FLUSHSIZE))) if arg[MEMSTORE_FLUSHSIZE]
          htd.setDeferredLogFlush(JBoolean.valueOf(arg.delete(DEFERRED_LOG_FLUSH))) if arg[DEFERRED_LOG_FLUSH]
+          set_user_metadata(htd, arg.delete(METADATA)) if arg[METADATA]
+          set_descriptor_config(htd, arg.delete(CONFIGURATION)) if arg[CONFIGURATION]

          # set a coprocessor attribute
          valid_coproc_keys = []
@@ -613,6 +615,7 @@ module Hbase
      end
      set_user_metadata(family, arg.delete(METADATA)) if arg[METADATA]
+      set_descriptor_config(family, arg.delete(CONFIGURATION)) if arg[CONFIGURATION]

      arg.each_key do |unknown_key|
        puts("Unknown argument ignored for column family %s: %s" % [name, unknown_key])
@@ -652,5 +655,14 @@ module Hbase
      end
    end

+    # Apply config specific to a table/column to its descriptor
+    def set_descriptor_config(descriptor, config)
+      raise(ArgumentError, "#{CONFIGURATION} must be a Hash type") unless config.kind_of?(Hash)
+      for k,v in config
+        v = v.to_s unless v.nil?
+        descriptor.setConfiguration(k, v)
+      end
+    end
  end
end
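
From the shell's point of view, the new set_descriptor_config hook means a CONFIGURATION hash is accepted at both table and column family scope; invocations would look something like the following (table, family, and keys invented for illustration):

    hbase> create 't1', {NAME => 'f1', CONFIGURATION => {'hbase.hstore.blockingStoreFiles' => '10'}}
    hbase> alter 't1', CONFIGURATION => {'hbase.hstore.compaction.min' => '5'}
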


@@ -79,4 +79,18 @@ public class TestHColumnDescriptor {
      assertEquals("Family name can not be empty", e.getLocalizedMessage());
    }
  }
+
+  /**
+   * Test that we add and remove strings from configuration properly.
+   */
+  @Test
+  public void testAddGetRemoveConfiguration() throws Exception {
+    HColumnDescriptor desc = new HColumnDescriptor("foo");
+    String key = "Some";
+    String value = "value";
+    desc.setConfiguration(key, value);
+    assertEquals(value, desc.getConfigurationValue(key));
+    desc.removeConfiguration(key);
+    assertEquals(null, desc.getConfigurationValue(key));
+  }
 }


@@ -100,4 +100,18 @@ public class TestHTableDescriptor {
    desc.setMemStoreFlushSize(1111L);
    assertEquals(1111L, desc.getMemStoreFlushSize());
  }
+
+  /**
+   * Test that we add and remove strings from configuration properly.
+   */
+  @Test
+  public void testAddGetRemoveConfiguration() throws Exception {
+    HTableDescriptor desc = new HTableDescriptor("table");
+    String key = "Some";
+    String value = "value";
+    desc.setConfiguration(key, value);
+    assertEquals(value, desc.getConfigurationValue(key));
+    desc.removeConfiguration(key);
+    assertEquals(null, desc.getConfigurationValue(key));
+  }
 }


@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
@@ -134,9 +135,15 @@ public class TestStore extends TestCase {
    hcd.setMaxVersions(4);
    init(methodName, conf, hcd);
  }

  private void init(String methodName, Configuration conf,
      HColumnDescriptor hcd) throws IOException {
+    HTableDescriptor htd = new HTableDescriptor(table);
+    init(methodName, conf, htd, hcd);
+  }
+
+  private void init(String methodName, Configuration conf, HTableDescriptor htd,
+      HColumnDescriptor hcd) throws IOException {
    //Setting up a Store
    Path basedir = new Path(DIR+methodName);
    String logName = "logs";
@@ -146,7 +153,6 @@ public class TestStore extends TestCase {
    fs.delete(logdir, true);

-    HTableDescriptor htd = new HTableDescriptor(table);
    htd.addFamily(hcd);
    HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
    HLog hlog = HLogFactory.createHLog(fs, basedir, logName, conf);
@@ -817,5 +823,34 @@ public class TestStore extends TestCase {
    store.getHRegion().clearSplit_TESTS_ONLY();
  }

+  public void testStoreUsesConfigurationFromHcdAndHtd() throws Exception {
+    final String CONFIG_KEY = "hbase.regionserver.thread.compaction.throttle";
+    long anyValue = 10;
+
+    // We'll check that it uses correct config and propagates it appropriately by going thru
+    // the simplest "real" path I can find - "throttleCompaction", which just checks whether
+    // a number we pass in is higher than some config value, inside compactionPolicy.
+    Configuration conf = HBaseConfiguration.create();
+    conf.setLong(CONFIG_KEY, anyValue);
+    init(getName() + "-xml", conf);
+    assertTrue(store.throttleCompaction(anyValue + 1));
+    assertFalse(store.throttleCompaction(anyValue));
+
+    // HTD overrides XML.
+    --anyValue;
+    HTableDescriptor htd = new HTableDescriptor(table);
+    HColumnDescriptor hcd = new HColumnDescriptor(family);
+    htd.setConfiguration(CONFIG_KEY, Long.toString(anyValue));
+    init(getName() + "-htd", conf, htd, hcd);
+    assertTrue(store.throttleCompaction(anyValue + 1));
+    assertFalse(store.throttleCompaction(anyValue));
+
+    // HCD overrides them both.
+    --anyValue;
+    hcd.setConfiguration(CONFIG_KEY, Long.toString(anyValue));
+    init(getName() + "-hcd", conf, htd, hcd);
+    assertTrue(store.throttleCompaction(anyValue + 1));
+    assertFalse(store.throttleCompaction(anyValue));
+  }
 }