HBASE-7571 add the notion of per-table or per-column family configuration (Sergey)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1438527 13f79535-47bb-0310-9956-ffa450edef68
Zhihong Yu 2013-01-25 14:11:45 +00:00
parent a650c87f1f
commit f95692ce65
13 changed files with 983 additions and 123 deletions


@@ -454,6 +454,7 @@ public final class HConstants {
public static final String VERSIONS = "VERSIONS";
public static final String IN_MEMORY = "IN_MEMORY";
public static final String METADATA = "METADATA";
public static final String CONFIGURATION = "CONFIGURATION";
/**
* This is a retry backoff multiplier table similar to the BSD TCP syn


@@ -199,6 +199,16 @@ public final class HBaseProtos {
getColumnFamiliesOrBuilderList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(
int index);
// repeated .NameStringPair configuration = 4;
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>
getConfigurationList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index);
int getConfigurationCount();
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index);
}
public static final class TableSchema extends
com.google.protobuf.GeneratedMessage
@@ -281,10 +291,32 @@ public final class HBaseProtos {
return columnFamilies_.get(index);
}
// repeated .NameStringPair configuration = 4;
public static final int CONFIGURATION_FIELD_NUMBER = 4;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
return configuration_;
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList() {
return configuration_;
}
public int getConfigurationCount() {
return configuration_.size();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
return configuration_.get(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index) {
return configuration_.get(index);
}
private void initFields() {
name_ = com.google.protobuf.ByteString.EMPTY;
attributes_ = java.util.Collections.emptyList();
columnFamilies_ = java.util.Collections.emptyList();
configuration_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -303,6 +335,12 @@ public final class HBaseProtos {
return false;
}
}
for (int i = 0; i < getConfigurationCount(); i++) {
if (!getConfiguration(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@@ -319,6 +357,9 @@ public final class HBaseProtos {
for (int i = 0; i < columnFamilies_.size(); i++) {
output.writeMessage(3, columnFamilies_.get(i));
}
for (int i = 0; i < configuration_.size(); i++) {
output.writeMessage(4, configuration_.get(i));
}
getUnknownFields().writeTo(output);
}
@@ -340,6 +381,10 @@ public final class HBaseProtos {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, columnFamilies_.get(i));
}
for (int i = 0; i < configuration_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(4, configuration_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
@@ -372,6 +417,8 @@ public final class HBaseProtos {
.equals(other.getAttributesList());
result = result && getColumnFamiliesList()
.equals(other.getColumnFamiliesList());
result = result && getConfigurationList()
.equals(other.getConfigurationList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
@@ -393,6 +440,10 @@ public final class HBaseProtos {
hash = (37 * hash) + COLUMNFAMILIES_FIELD_NUMBER;
hash = (53 * hash) + getColumnFamiliesList().hashCode();
}
if (getConfigurationCount() > 0) {
hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER;
hash = (53 * hash) + getConfigurationList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
return hash;
}
@@ -503,6 +554,7 @@ public final class HBaseProtos {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getAttributesFieldBuilder();
getColumnFamiliesFieldBuilder();
getConfigurationFieldBuilder();
}
}
private static Builder create() {
@@ -525,6 +577,12 @@ public final class HBaseProtos {
} else {
columnFamiliesBuilder_.clear();
}
if (configurationBuilder_ == null) {
configuration_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
} else {
configurationBuilder_.clear();
}
return this;
}
@@ -585,6 +643,15 @@ public final class HBaseProtos {
} else {
result.columnFamilies_ = columnFamiliesBuilder_.build();
}
if (configurationBuilder_ == null) {
if (((bitField0_ & 0x00000008) == 0x00000008)) {
configuration_ = java.util.Collections.unmodifiableList(configuration_);
bitField0_ = (bitField0_ & ~0x00000008);
}
result.configuration_ = configuration_;
} else {
result.configuration_ = configurationBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
@@ -656,6 +723,32 @@ public final class HBaseProtos {
}
}
}
if (configurationBuilder_ == null) {
if (!other.configuration_.isEmpty()) {
if (configuration_.isEmpty()) {
configuration_ = other.configuration_;
bitField0_ = (bitField0_ & ~0x00000008);
} else {
ensureConfigurationIsMutable();
configuration_.addAll(other.configuration_);
}
onChanged();
}
} else {
if (!other.configuration_.isEmpty()) {
if (configurationBuilder_.isEmpty()) {
configurationBuilder_.dispose();
configurationBuilder_ = null;
configuration_ = other.configuration_;
bitField0_ = (bitField0_ & ~0x00000008);
configurationBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getConfigurationFieldBuilder() : null;
} else {
configurationBuilder_.addAllMessages(other.configuration_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
@@ -673,6 +766,12 @@ public final class HBaseProtos {
return false;
}
}
for (int i = 0; i < getConfigurationCount(); i++) {
if (!getConfiguration(i).isInitialized()) {
return false;
}
}
return true;
}
@@ -716,6 +815,12 @@ public final class HBaseProtos {
addColumnFamilies(subBuilder.buildPartial());
break;
}
case 34: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addConfiguration(subBuilder.buildPartial());
break;
}
}
}
}
@@ -1118,6 +1223,192 @@ public final class HBaseProtos {
return columnFamiliesBuilder_;
}
// repeated .NameStringPair configuration = 4;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ =
java.util.Collections.emptyList();
private void ensureConfigurationIsMutable() {
if (!((bitField0_ & 0x00000008) == 0x00000008)) {
configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_);
bitField0_ |= 0x00000008;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
if (configurationBuilder_ == null) {
return java.util.Collections.unmodifiableList(configuration_);
} else {
return configurationBuilder_.getMessageList();
}
}
public int getConfigurationCount() {
if (configurationBuilder_ == null) {
return configuration_.size();
} else {
return configurationBuilder_.getCount();
}
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
if (configurationBuilder_ == null) {
return configuration_.get(index);
} else {
return configurationBuilder_.getMessage(index);
}
}
public Builder setConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.set(index, value);
onChanged();
} else {
configurationBuilder_.setMessage(index, value);
}
return this;
}
public Builder setConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.set(index, builderForValue.build());
onChanged();
} else {
configurationBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.add(value);
onChanged();
} else {
configurationBuilder_.addMessage(value);
}
return this;
}
public Builder addConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.add(index, value);
onChanged();
} else {
configurationBuilder_.addMessage(index, value);
}
return this;
}
public Builder addConfiguration(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.add(builderForValue.build());
onChanged();
} else {
configurationBuilder_.addMessage(builderForValue.build());
}
return this;
}
public Builder addConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.add(index, builderForValue.build());
onChanged();
} else {
configurationBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
public Builder addAllConfiguration(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
super.addAll(values, configuration_);
onChanged();
} else {
configurationBuilder_.addAllMessages(values);
}
return this;
}
public Builder clearConfiguration() {
if (configurationBuilder_ == null) {
configuration_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
} else {
configurationBuilder_.clear();
}
return this;
}
public Builder removeConfiguration(int index) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.remove(index);
onChanged();
} else {
configurationBuilder_.remove(index);
}
return this;
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder(
int index) {
return getConfigurationFieldBuilder().getBuilder(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index) {
if (configurationBuilder_ == null) {
return configuration_.get(index); } else {
return configurationBuilder_.getMessageOrBuilder(index);
}
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList() {
if (configurationBuilder_ != null) {
return configurationBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(configuration_);
}
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() {
return getConfigurationFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder(
int index) {
return getConfigurationFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
}
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder>
getConfigurationBuilderList() {
return getConfigurationFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationFieldBuilder() {
if (configurationBuilder_ == null) {
configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>(
configuration_,
((bitField0_ & 0x00000008) == 0x00000008),
getParentForChildren(),
isClean());
configuration_ = null;
}
return configurationBuilder_;
}
// @@protoc_insertion_point(builder_scope:TableSchema)
}
@@ -1145,6 +1436,16 @@ public final class HBaseProtos {
getAttributesOrBuilderList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder(
int index);
// repeated .NameStringPair configuration = 3;
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>
getConfigurationList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index);
int getConfigurationCount();
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index);
}
public static final class ColumnFamilySchema extends
com.google.protobuf.GeneratedMessage
@@ -1206,9 +1507,31 @@ public final class HBaseProtos {
return attributes_.get(index);
}
// repeated .NameStringPair configuration = 3;
public static final int CONFIGURATION_FIELD_NUMBER = 3;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
return configuration_;
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList() {
return configuration_;
}
public int getConfigurationCount() {
return configuration_.size();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
return configuration_.get(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index) {
return configuration_.get(index);
}
private void initFields() {
name_ = com.google.protobuf.ByteString.EMPTY;
attributes_ = java.util.Collections.emptyList();
configuration_ = java.util.Collections.emptyList();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -1225,6 +1548,12 @@ public final class HBaseProtos {
return false;
}
}
for (int i = 0; i < getConfigurationCount(); i++) {
if (!getConfiguration(i).isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
}
memoizedIsInitialized = 1;
return true;
}
@@ -1238,6 +1567,9 @@ public final class HBaseProtos {
for (int i = 0; i < attributes_.size(); i++) {
output.writeMessage(2, attributes_.get(i));
}
for (int i = 0; i < configuration_.size(); i++) {
output.writeMessage(3, configuration_.get(i));
}
getUnknownFields().writeTo(output);
}
@@ -1255,6 +1587,10 @@ public final class HBaseProtos {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, attributes_.get(i));
}
for (int i = 0; i < configuration_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(3, configuration_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
@@ -1285,6 +1621,8 @@ public final class HBaseProtos {
}
result = result && getAttributesList()
.equals(other.getAttributesList());
result = result && getConfigurationList()
.equals(other.getConfigurationList());
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
@@ -1302,6 +1640,10 @@ public final class HBaseProtos {
hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
hash = (53 * hash) + getAttributesList().hashCode();
}
if (getConfigurationCount() > 0) {
hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER;
hash = (53 * hash) + getConfigurationList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
return hash;
}
@@ -1411,6 +1753,7 @@ public final class HBaseProtos {
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getAttributesFieldBuilder();
getConfigurationFieldBuilder();
}
}
private static Builder create() {
@@ -1427,6 +1770,12 @@ public final class HBaseProtos {
} else {
attributesBuilder_.clear();
}
if (configurationBuilder_ == null) {
configuration_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
} else {
configurationBuilder_.clear();
}
return this;
}
@@ -1478,6 +1827,15 @@ public final class HBaseProtos {
} else {
result.attributes_ = attributesBuilder_.build();
}
if (configurationBuilder_ == null) {
if (((bitField0_ & 0x00000004) == 0x00000004)) {
configuration_ = java.util.Collections.unmodifiableList(configuration_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.configuration_ = configuration_;
} else {
result.configuration_ = configurationBuilder_.build();
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
@@ -1523,6 +1881,32 @@ public final class HBaseProtos {
}
}
}
if (configurationBuilder_ == null) {
if (!other.configuration_.isEmpty()) {
if (configuration_.isEmpty()) {
configuration_ = other.configuration_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureConfigurationIsMutable();
configuration_.addAll(other.configuration_);
}
onChanged();
}
} else {
if (!other.configuration_.isEmpty()) {
if (configurationBuilder_.isEmpty()) {
configurationBuilder_.dispose();
configurationBuilder_ = null;
configuration_ = other.configuration_;
bitField0_ = (bitField0_ & ~0x00000004);
configurationBuilder_ =
com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
getConfigurationFieldBuilder() : null;
} else {
configurationBuilder_.addAllMessages(other.configuration_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
@@ -1538,6 +1922,12 @@ public final class HBaseProtos {
return false;
}
}
for (int i = 0; i < getConfigurationCount(); i++) {
if (!getConfiguration(i).isInitialized()) {
return false;
}
}
return true;
}
@@ -1575,6 +1965,12 @@ public final class HBaseProtos {
addAttributes(subBuilder.buildPartial());
break;
}
case 26: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder();
input.readMessage(subBuilder, extensionRegistry);
addConfiguration(subBuilder.buildPartial());
break;
}
}
}
}
@@ -1791,6 +2187,192 @@ public final class HBaseProtos {
return attributesBuilder_;
}
// repeated .NameStringPair configuration = 3;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ =
java.util.Collections.emptyList();
private void ensureConfigurationIsMutable() {
if (!((bitField0_ & 0x00000004) == 0x00000004)) {
configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_);
bitField0_ |= 0x00000004;
}
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
if (configurationBuilder_ == null) {
return java.util.Collections.unmodifiableList(configuration_);
} else {
return configurationBuilder_.getMessageList();
}
}
public int getConfigurationCount() {
if (configurationBuilder_ == null) {
return configuration_.size();
} else {
return configurationBuilder_.getCount();
}
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
if (configurationBuilder_ == null) {
return configuration_.get(index);
} else {
return configurationBuilder_.getMessage(index);
}
}
public Builder setConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.set(index, value);
onChanged();
} else {
configurationBuilder_.setMessage(index, value);
}
return this;
}
public Builder setConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.set(index, builderForValue.build());
onChanged();
} else {
configurationBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.add(value);
onChanged();
} else {
configurationBuilder_.addMessage(value);
}
return this;
}
public Builder addConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
if (configurationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConfigurationIsMutable();
configuration_.add(index, value);
onChanged();
} else {
configurationBuilder_.addMessage(index, value);
}
return this;
}
public Builder addConfiguration(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.add(builderForValue.build());
onChanged();
} else {
configurationBuilder_.addMessage(builderForValue.build());
}
return this;
}
public Builder addConfiguration(
int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.add(index, builderForValue.build());
onChanged();
} else {
configurationBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
public Builder addAllConfiguration(
java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
super.addAll(values, configuration_);
onChanged();
} else {
configurationBuilder_.addAllMessages(values);
}
return this;
}
public Builder clearConfiguration() {
if (configurationBuilder_ == null) {
configuration_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
configurationBuilder_.clear();
}
return this;
}
public Builder removeConfiguration(int index) {
if (configurationBuilder_ == null) {
ensureConfigurationIsMutable();
configuration_.remove(index);
onChanged();
} else {
configurationBuilder_.remove(index);
}
return this;
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder(
int index) {
return getConfigurationFieldBuilder().getBuilder(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
int index) {
if (configurationBuilder_ == null) {
return configuration_.get(index); } else {
return configurationBuilder_.getMessageOrBuilder(index);
}
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationOrBuilderList() {
if (configurationBuilder_ != null) {
return configurationBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(configuration_);
}
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() {
return getConfigurationFieldBuilder().addBuilder(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder(
int index) {
return getConfigurationFieldBuilder().addBuilder(
index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
}
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder>
getConfigurationBuilderList() {
return getConfigurationFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
getConfigurationFieldBuilder() {
if (configurationBuilder_ == null) {
configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>(
configuration_,
((bitField0_ & 0x00000004) == 0x00000004),
getParentForChildren(),
isClean());
configuration_ = null;
}
return configurationBuilder_;
}
// @@protoc_insertion_point(builder_scope:ColumnFamilySchema)
}
@@ -11158,56 +11740,58 @@ public final class HBaseProtos {
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\013hbase.proto\"m\n\013TableSchema\022\014\n\004name\030\001 \001" +
"(\014\022#\n\nattributes\030\002 \003(\0132\017.BytesBytesPair\022" +
"+\n\016columnFamilies\030\003 \003(\0132\023.ColumnFamilySc" +
"hema\"G\n\022ColumnFamilySchema\022\014\n\004name\030\001 \002(\014" +
"\022#\n\nattributes\030\002 \003(\0132\017.BytesBytesPair\"s\n" +
"\nRegionInfo\022\020\n\010regionId\030\001 \002(\004\022\021\n\ttableNa" +
"me\030\002 \002(\014\022\020\n\010startKey\030\003 \001(\014\022\016\n\006endKey\030\004 \001" +
"(\014\022\017\n\007offline\030\005 \001(\010\022\r\n\005split\030\006 \001(\010\"\225\001\n\017R" +
"egionSpecifier\0222\n\004type\030\001 \002(\0162$.RegionSpe" +
"cifier.RegionSpecifierType\022\r\n\005value\030\002 \002(",
"\014\"?\n\023RegionSpecifierType\022\017\n\013REGION_NAME\020" +
"\001\022\027\n\023ENCODED_REGION_NAME\020\002\"\260\003\n\nRegionLoa" +
"d\022)\n\017regionSpecifier\030\001 \002(\0132\020.RegionSpeci" +
"fier\022\016\n\006stores\030\002 \001(\r\022\022\n\nstorefiles\030\003 \001(\r" +
"\022\037\n\027storeUncompressedSizeMB\030\004 \001(\r\022\027\n\017sto" +
"refileSizeMB\030\005 \001(\r\022\026\n\016memstoreSizeMB\030\006 \001" +
"(\r\022\034\n\024storefileIndexSizeMB\030\007 \001(\r\022\031\n\021read" +
"RequestsCount\030\010 \001(\004\022\032\n\022writeRequestsCoun" +
"t\030\t \001(\004\022\032\n\022totalCompactingKVs\030\n \001(\004\022\033\n\023c" +
"urrentCompactedKVs\030\013 \001(\004\022\027\n\017rootIndexSiz",
"eKB\030\014 \001(\r\022\036\n\026totalStaticIndexSizeKB\030\r \001(" +
"\r\022\036\n\026totalStaticBloomSizeKB\030\016 \001(\r\022\032\n\022com" +
"pleteSequenceId\030\017 \001(\004\"\372\001\n\nServerLoad\022\030\n\020" +
"numberOfRequests\030\001 \001(\r\022\035\n\025totalNumberOfR" +
"equests\030\002 \001(\r\022\022\n\nusedHeapMB\030\003 \001(\r\022\021\n\tmax" +
"HeapMB\030\004 \001(\r\022 \n\013regionLoads\030\005 \003(\0132\013.Regi" +
"onLoad\022\"\n\014coprocessors\030\006 \003(\0132\014.Coprocess" +
"or\022\027\n\017reportStartTime\030\007 \001(\004\022\025\n\rreportEnd" +
"Time\030\010 \001(\004\022\026\n\016infoServerPort\030\t \001(\r\"%\n\tTi" +
"meRange\022\014\n\004from\030\001 \001(\004\022\n\n\002to\030\002 \001(\004\"0\n\006Fil",
"ter\022\014\n\004name\030\001 \002(\t\022\030\n\020serializedFilter\030\002 " +
"\001(\014\"w\n\010KeyValue\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030\002" +
" \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttimestamp\030\004 \001" +
"(\004\022\031\n\007keyType\030\005 \001(\0162\010.KeyType\022\r\n\005value\030\006" +
" \001(\014\"?\n\nServerName\022\020\n\010hostName\030\001 \002(\t\022\014\n\004" +
"port\030\002 \001(\r\022\021\n\tstartCode\030\003 \001(\004\"\033\n\013Coproce" +
"ssor\022\014\n\004name\030\001 \002(\t\"-\n\016NameStringPair\022\014\n\004" +
"name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\",\n\rNameBytesPa" +
"ir\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014\"/\n\016Bytes" +
"BytesPair\022\r\n\005first\030\001 \002(\014\022\016\n\006second\030\002 \002(\014",
"\",\n\rNameInt64Pair\022\014\n\004name\030\001 \001(\t\022\r\n\005value" +
"\030\002 \001(\003\"\n\n\010EmptyMsg\"\032\n\007LongMsg\022\017\n\007longMsg" +
"\030\001 \002(\003*r\n\013CompareType\022\010\n\004LESS\020\000\022\021\n\rLESS_" +
"OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL\020\003\022\024\n\020" +
"GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n\005NO_OP" +
"\020\006*_\n\007KeyType\022\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006D" +
"ELETE\020\010\022\021\n\rDELETE_COLUMN\020\014\022\021\n\rDELETE_FAM" +
"ILY\020\016\022\014\n\007MAXIMUM\020\377\001B>\n*org.apache.hadoop" +
".hbase.protobuf.generatedB\013HBaseProtosH\001" +
"\240\001\001"
"\n\013hbase.proto\"\225\001\n\013TableSchema\022\014\n\004name\030\001 " +
"\001(\014\022#\n\nattributes\030\002 \003(\0132\017.BytesBytesPair" +
"\022+\n\016columnFamilies\030\003 \003(\0132\023.ColumnFamilyS" +
"chema\022&\n\rconfiguration\030\004 \003(\0132\017.NameStrin" +
"gPair\"o\n\022ColumnFamilySchema\022\014\n\004name\030\001 \002(" +
"\014\022#\n\nattributes\030\002 \003(\0132\017.BytesBytesPair\022&" +
"\n\rconfiguration\030\003 \003(\0132\017.NameStringPair\"s" +
"\n\nRegionInfo\022\020\n\010regionId\030\001 \002(\004\022\021\n\ttableN" +
"ame\030\002 \002(\014\022\020\n\010startKey\030\003 \001(\014\022\016\n\006endKey\030\004 " +
"\001(\014\022\017\n\007offline\030\005 \001(\010\022\r\n\005split\030\006 \001(\010\"\225\001\n\017",
"RegionSpecifier\0222\n\004type\030\001 \002(\0162$.RegionSp" +
"ecifier.RegionSpecifierType\022\r\n\005value\030\002 \002" +
"(\014\"?\n\023RegionSpecifierType\022\017\n\013REGION_NAME" +
"\020\001\022\027\n\023ENCODED_REGION_NAME\020\002\"\260\003\n\nRegionLo" +
"ad\022)\n\017regionSpecifier\030\001 \002(\0132\020.RegionSpec" +
"ifier\022\016\n\006stores\030\002 \001(\r\022\022\n\nstorefiles\030\003 \001(" +
"\r\022\037\n\027storeUncompressedSizeMB\030\004 \001(\r\022\027\n\017st" +
"orefileSizeMB\030\005 \001(\r\022\026\n\016memstoreSizeMB\030\006 " +
"\001(\r\022\034\n\024storefileIndexSizeMB\030\007 \001(\r\022\031\n\021rea" +
"dRequestsCount\030\010 \001(\004\022\032\n\022writeRequestsCou",
"nt\030\t \001(\004\022\032\n\022totalCompactingKVs\030\n \001(\004\022\033\n\023" +
"currentCompactedKVs\030\013 \001(\004\022\027\n\017rootIndexSi" +
"zeKB\030\014 \001(\r\022\036\n\026totalStaticIndexSizeKB\030\r \001" +
"(\r\022\036\n\026totalStaticBloomSizeKB\030\016 \001(\r\022\032\n\022co" +
"mpleteSequenceId\030\017 \001(\004\"\372\001\n\nServerLoad\022\030\n" +
"\020numberOfRequests\030\001 \001(\r\022\035\n\025totalNumberOf" +
"Requests\030\002 \001(\r\022\022\n\nusedHeapMB\030\003 \001(\r\022\021\n\tma" +
"xHeapMB\030\004 \001(\r\022 \n\013regionLoads\030\005 \003(\0132\013.Reg" +
"ionLoad\022\"\n\014coprocessors\030\006 \003(\0132\014.Coproces" +
"sor\022\027\n\017reportStartTime\030\007 \001(\004\022\025\n\rreportEn",
"dTime\030\010 \001(\004\022\026\n\016infoServerPort\030\t \001(\r\"%\n\tT" +
"imeRange\022\014\n\004from\030\001 \001(\004\022\n\n\002to\030\002 \001(\004\"0\n\006Fi" +
"lter\022\014\n\004name\030\001 \002(\t\022\030\n\020serializedFilter\030\002" +
" \001(\014\"w\n\010KeyValue\022\013\n\003row\030\001 \002(\014\022\016\n\006family\030" +
"\002 \002(\014\022\021\n\tqualifier\030\003 \002(\014\022\021\n\ttimestamp\030\004 " +
"\001(\004\022\031\n\007keyType\030\005 \001(\0162\010.KeyType\022\r\n\005value\030" +
"\006 \001(\014\"?\n\nServerName\022\020\n\010hostName\030\001 \002(\t\022\014\n" +
"\004port\030\002 \001(\r\022\021\n\tstartCode\030\003 \001(\004\"\033\n\013Coproc" +
"essor\022\014\n\004name\030\001 \002(\t\"-\n\016NameStringPair\022\014\n" +
"\004name\030\001 \002(\t\022\r\n\005value\030\002 \002(\t\",\n\rNameBytesP",
"air\022\014\n\004name\030\001 \002(\t\022\r\n\005value\030\002 \001(\014\"/\n\016Byte" +
"sBytesPair\022\r\n\005first\030\001 \002(\014\022\016\n\006second\030\002 \002(" +
"\014\",\n\rNameInt64Pair\022\014\n\004name\030\001 \001(\t\022\r\n\005valu" +
"e\030\002 \001(\003\"\n\n\010EmptyMsg\"\032\n\007LongMsg\022\017\n\007longMs" +
"g\030\001 \002(\003*r\n\013CompareType\022\010\n\004LESS\020\000\022\021\n\rLESS" +
"_OR_EQUAL\020\001\022\t\n\005EQUAL\020\002\022\r\n\tNOT_EQUAL\020\003\022\024\n" +
"\020GREATER_OR_EQUAL\020\004\022\013\n\007GREATER\020\005\022\t\n\005NO_O" +
"P\020\006*_\n\007KeyType\022\013\n\007MINIMUM\020\000\022\007\n\003PUT\020\004\022\n\n\006" +
"DELETE\020\010\022\021\n\rDELETE_COLUMN\020\014\022\021\n\rDELETE_FA" +
"MILY\020\016\022\014\n\007MAXIMUM\020\377\001B>\n*org.apache.hadoo",
"p.hbase.protobuf.generatedB\013HBaseProtosH" +
"\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -11219,7 +11803,7 @@ public final class HBaseProtos {
internal_static_TableSchema_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_TableSchema_descriptor,
new java.lang.String[] { "Name", "Attributes", "ColumnFamilies", },
new java.lang.String[] { "Name", "Attributes", "ColumnFamilies", "Configuration", },
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.class,
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema.Builder.class);
internal_static_ColumnFamilySchema_descriptor =
@@ -11227,7 +11811,7 @@ public final class HBaseProtos {
internal_static_ColumnFamilySchema_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ColumnFamilySchema_descriptor,
new java.lang.String[] { "Name", "Attributes", },
new java.lang.String[] { "Name", "Attributes", "Configuration", },
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.class,
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder.class);
internal_static_RegionInfo_descriptor =


@@ -31,6 +31,7 @@ message TableSchema {
optional bytes name = 1;
repeated BytesBytesPair attributes = 2;
repeated ColumnFamilySchema columnFamilies = 3;
repeated NameStringPair configuration = 4;
}
/**
@@ -40,6 +41,7 @@ message TableSchema {
message ColumnFamilySchema {
required bytes name = 1;
repeated BytesBytesPair attributes = 2;
repeated NameStringPair configuration = 3;
}
/**
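The two new repeated NameStringPair fields above are what carry the per-table and per-column-family overrides on the wire. A minimal sketch of populating the table-level field through the regenerated Java API shown earlier (the property name is only an illustrative example):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;

public class TableSchemaConfigSketch {
  public static void main(String[] args) {
    // Attach one table-level override via the new repeated field (number 4).
    TableSchema schema = TableSchema.newBuilder()
        .setName(ByteString.copyFromUtf8("t1"))
        .addConfiguration(NameStringPair.newBuilder()
            .setName("hbase.hstore.compaction.min")  // illustrative key
            .setValue("5"))
        .build();
    System.out.println(schema.getConfigurationCount());  // prints 1
  }
}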


@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
@@ -67,7 +68,8 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
// Version 8 -- reintroduction of bloom filters, changed from boolean to enum
// Version 9 -- add data block encoding
// Version 10 -- change metadata to standard type.
private static final byte COLUMN_DESCRIPTOR_VERSION = (byte) 10;
// Version 11 -- add column family level configuration.
private static final byte COLUMN_DESCRIPTOR_VERSION = (byte) 11;
// These constants are used as FileInfo keys
public static final String COMPRESSION = "COMPRESSION";
@@ -221,9 +223,16 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
private byte [] name;
// Column metadata
protected final Map<ImmutableBytesWritable, ImmutableBytesWritable> values =
private final Map<ImmutableBytesWritable, ImmutableBytesWritable> values =
new HashMap<ImmutableBytesWritable,ImmutableBytesWritable>();
/**
* A map which holds the configuration specific to the column family.
* The keys of the map have the same names as config keys and override the defaults with
* cf-specific settings. Example usage may be for compactions, etc.
*/
private final Map<String, String> configuration = new HashMap<String, String>();
/*
* Cache the max versions rather than calculate it every time.
*/
@@ -278,6 +287,9 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
desc.values.entrySet()) {
this.values.put(e.getKey(), e.getValue());
}
for (Map.Entry<String, String> e : desc.configuration.entrySet()) {
this.configuration.put(e.getKey(), e.getValue());
}
setMaxVersions(desc.getMaxVersions());
}
@@ -950,6 +962,21 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
}
s.append('}');
}
if (!configuration.isEmpty()) {
s.append(", ");
s.append(HConstants.CONFIGURATION).append(" => ");
s.append('{');
boolean printCommaForConfiguration = false;
for (Map.Entry<String, String> e : configuration.entrySet()) {
if (printCommaForConfiguration) s.append(", ");
printCommaForConfiguration = true;
s.append('\'').append(e.getKey()).append('\'');
s.append(" => ");
s.append('\'').append(e.getValue()).append('\'');
}
s.append("}");
}
return s;
}
@@ -982,6 +1009,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
int result = Bytes.hashCode(this.name);
result ^= Byte.valueOf(COLUMN_DESCRIPTOR_VERSION).hashCode();
result ^= values.hashCode();
result ^= configuration.hashCode();
return result;
}
@@ -1052,6 +1080,19 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
String value = getValue(HConstants.VERSIONS);
this.cachedMaxVersions = (value != null)?
Integer.valueOf(value).intValue(): DEFAULT_VERSIONS;
if (version > 10) {
configuration.clear();
int numConfigs = in.readInt();
for (int i = 0; i < numConfigs; i++) {
ImmutableBytesWritable key = new ImmutableBytesWritable();
ImmutableBytesWritable val = new ImmutableBytesWritable();
key.readFields(in);
val.readFields(in);
configuration.put(
Bytes.toString(key.get(), key.getOffset(), key.getLength()),
Bytes.toString(val.get(), val.getOffset(), val.getLength()));
}
}
}
}
@@ -1068,6 +1109,11 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
e.getKey().write(out);
e.getValue().write(out);
}
out.writeInt(configuration.size());
for (Map.Entry<String, String> e : configuration.entrySet()) {
new ImmutableBytesWritable(Bytes.toBytes(e.getKey())).write(out);
new ImmutableBytesWritable(Bytes.toBytes(e.getValue())).write(out);
}
}
// Comparable
@@ -1082,6 +1128,13 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
else if (result > 0)
result = 1;
}
if (result == 0) {
result = this.configuration.hashCode() - o.configuration.hashCode();
if (result < 0)
result = -1;
else if (result > 0)
result = 1;
}
return result;
}
@@ -1125,6 +1178,9 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
for (BytesBytesPair a: cfs.getAttributesList()) {
hcd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
}
for (NameStringPair a: cfs.getConfigurationList()) {
hcd.setConfiguration(a.getName(), a.getValue());
}
return hcd;
}
@@ -1140,6 +1196,47 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
aBuilder.setSecond(ByteString.copyFrom(e.getValue().get()));
builder.addAttributes(aBuilder.build());
}
for (Map.Entry<String, String> e : this.configuration.entrySet()) {
NameStringPair.Builder aBuilder = NameStringPair.newBuilder();
aBuilder.setName(e.getKey());
aBuilder.setValue(e.getValue());
builder.addConfiguration(aBuilder.build());
}
return builder.build();
}
/**
* Getter for accessing the configuration value by key.
*/
public String getConfigurationValue(String key) {
return configuration.get(key);
}
/**
* Getter for fetching an unmodifiable {@link #configuration} map.
*/
public Map<String, String> getConfiguration() {
// shallow pointer copy
return Collections.unmodifiableMap(configuration);
}
/**
* Setter for storing a configuration setting in {@link #configuration} map.
* @param key Config key. Same as XML config key e.g. hbase.something.or.other.
* @param value String value. If null, removes the configuration.
*/
public void setConfiguration(String key, String value) {
if (value == null) {
removeConfiguration(key);
} else {
configuration.put(key, value);
}
}
/**
* Remove a configuration setting represented by the key from the {@link #configuration} map.
*/
public void removeConfiguration(final String key) {
configuration.remove(key);
}
}
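Taken together, the new HColumnDescriptor methods amount to a small string-to-string map with remove-on-null semantics, kept separate from the existing values map. A quick sketch of the round trip (the key is an arbitrary configuration property):

import org.apache.hadoop.hbase.HColumnDescriptor;

public class CfConfigSketch {
  public static void main(String[] args) {
    HColumnDescriptor hcd = new HColumnDescriptor("cf1");
    hcd.setConfiguration("hbase.hstore.compaction.min", "5");   // per-CF override
    System.out.println(hcd.getConfigurationValue("hbase.hstore.compaction.min"));  // 5
    hcd.setConfiguration("hbase.hstore.compaction.min", null);  // null removes the entry
    System.out.println(hcd.getConfigurationValue("hbase.hstore.compaction.min"));  // null
  }
}

The same map is what toString() renders under CONFIGURATION => {...} and what convert() ships in the new protobuf field.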


@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
@@ -65,8 +66,9 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* Version 4 adds indexes
* Version 5 removed transactional pollution -- e.g. indexes
* Version 6 changed metadata to BytesBytesPair in PB
* Version 7 adds table-level configuration
*/
private static final byte TABLE_DESCRIPTOR_VERSION = 6;
private static final byte TABLE_DESCRIPTOR_VERSION = 7;
private byte [] name = HConstants.EMPTY_BYTE_ARRAY;
@@ -77,9 +79,16 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* includes values like IS_ROOT, IS_META, DEFERRED_LOG_FLUSH, SPLIT_POLICY,
* MAX_FILE_SIZE, READONLY, MEMSTORE_FLUSHSIZE etc...
*/
protected final Map<ImmutableBytesWritable, ImmutableBytesWritable> values =
private final Map<ImmutableBytesWritable, ImmutableBytesWritable> values =
new HashMap<ImmutableBytesWritable, ImmutableBytesWritable>();
/**
* A map which holds the configuration specific to the table.
* The keys of the map have the same names as config keys and override the defaults with
* table-specific settings. Example usage may be for compactions, etc.
*/
private final Map<String, String> configuration = new HashMap<String, String>();
public static final String SPLIT_POLICY = "SPLIT_POLICY";
/**
@@ -236,7 +245,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
}
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> entry:
values.entrySet()) {
this.values.put(entry.getKey(), entry.getValue());
setValue(entry.getKey(), entry.getValue());
}
}
@@ -295,7 +304,10 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
}
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
desc.values.entrySet()) {
this.values.put(e.getKey(), e.getValue());
setValue(e.getKey(), e.getValue());
}
for (Map.Entry<String, String> e : desc.configuration.entrySet()) {
this.configuration.put(e.getKey(), e.getValue());
}
}
@@ -333,7 +345,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
*/
protected void setRootRegion(boolean isRoot) {
// TODO: Make the value a boolean rather than String of boolean.
values.put(IS_ROOT_KEY, isRoot? TRUE: FALSE);
setValue(IS_ROOT_KEY, isRoot? TRUE: FALSE);
}
/**
@@ -374,7 +386,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* <code> .META. </code> region
*/
protected void setMetaRegion(boolean isMeta) {
values.put(IS_META_KEY, isMeta? TRUE: FALSE);
setValue(IS_META_KEY, isMeta? TRUE: FALSE);
}
/**
@@ -487,7 +499,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* @see #values
*/
public void setValue(byte[] key, byte[] value) {
setValue(new ImmutableBytesWritable(key), value);
setValue(new ImmutableBytesWritable(key), new ImmutableBytesWritable(value));
}
/*
@@ -495,8 +507,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* @param value The value.
*/
private void setValue(final ImmutableBytesWritable key,
final byte[] value) {
values.put(key, new ImmutableBytesWritable(value));
final String value) {
setValue(key, new ImmutableBytesWritable(Bytes.toBytes(value)));
}
/*
@@ -517,7 +529,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
*/
public void setValue(String key, String value) {
if (value == null) {
remove(Bytes.toBytes(key));
remove(key);
} else {
setValue(Bytes.toBytes(key), Bytes.toBytes(value));
}
@@ -529,8 +541,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* @param key Key whose key and value we're to remove from HTableDescriptor
* parameters.
*/
public void remove(final byte [] key) {
values.remove(new ImmutableBytesWritable(key));
public void remove(final String key) {
remove(new ImmutableBytesWritable(Bytes.toBytes(key)));
}
/**
@@ -539,8 +551,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* @param key Key whose key and value we're to remove from HTableDescriptor
* parameters.
*/
public void remove(final String key) {
remove(Bytes.toBytes(key));
public void remove(ImmutableBytesWritable key) {
values.remove(key);
}
/**
@@ -673,7 +685,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* before a split is triggered.
*/
public void setMaxFileSize(long maxFileSize) {
setValue(MAX_FILESIZE_KEY, Bytes.toBytes(Long.toString(maxFileSize)));
setValue(MAX_FILESIZE_KEY, Long.toString(maxFileSize));
}
/**
@@ -698,8 +710,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* @param memstoreFlushSize memory cache flush size for each hregion
*/
public void setMemStoreFlushSize(long memstoreFlushSize) {
setValue(MEMSTORE_FLUSHSIZE_KEY,
Bytes.toBytes(Long.toString(memstoreFlushSize)));
setValue(MEMSTORE_FLUSHSIZE_KEY, Long.toString(memstoreFlushSize));
}
/**
@@ -757,13 +768,13 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
// step 1: set partitioning and pruning
Set<ImmutableBytesWritable> reservedKeys = new TreeSet<ImmutableBytesWritable>();
Set<ImmutableBytesWritable> configKeys = new TreeSet<ImmutableBytesWritable>();
Set<ImmutableBytesWritable> userKeys = new TreeSet<ImmutableBytesWritable>();
for (ImmutableBytesWritable k : values.keySet()) {
if (k == null || k.get() == null) continue;
String key = Bytes.toString(k.get());
// in this section, print out reserved keywords + coprocessor info
if (!RESERVED_KEYWORDS.contains(k) && !key.startsWith("coprocessor$")) {
configKeys.add(k);
userKeys.add(k);
continue;
}
// only print out IS_ROOT/IS_META if true
@@ -780,50 +791,67 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
}
// early exit optimization
if (reservedKeys.isEmpty() && configKeys.isEmpty()) return s;
boolean hasAttributes = !reservedKeys.isEmpty() || !userKeys.isEmpty();
if (!hasAttributes && configuration.isEmpty()) return s;
// step 2: printing
s.append(", {TABLE_ATTRIBUTES => {");
s.append(", {");
// step 2: printing attributes
if (hasAttributes) {
s.append("TABLE_ATTRIBUTES => {");
// print all reserved keys first
boolean printCommaForAttr = false;
for (ImmutableBytesWritable k : reservedKeys) {
String key = Bytes.toString(k.get());
String value = Bytes.toString(values.get(k).get());
if (printCommaForAttr) s.append(", ");
printCommaForAttr = true;
s.append(key);
s.append(" => ");
s.append('\'').append(value).append('\'');
}
if (!configKeys.isEmpty()) {
// print all non-reserved, advanced config keys as a separate subset
if (printCommaForAttr) s.append(", ");
printCommaForAttr = true;
s.append(HConstants.METADATA).append(" => ");
s.append("{");
boolean printCommaForCfg = false;
for (ImmutableBytesWritable k : configKeys) {
// print all reserved keys first
boolean printCommaForAttr = false;
for (ImmutableBytesWritable k : reservedKeys) {
String key = Bytes.toString(k.get());
String value = Bytes.toString(values.get(k).get());
if (printCommaForCfg) s.append(", ");
printCommaForCfg = true;
s.append('\'').append(key).append('\'');
if (printCommaForAttr) s.append(", ");
printCommaForAttr = true;
s.append(key);
s.append(" => ");
s.append('\'').append(value).append('\'');
}
s.append("}");
if (!userKeys.isEmpty()) {
// print all non-reserved, advanced config keys as a separate subset
if (printCommaForAttr) s.append(", ");
printCommaForAttr = true;
s.append(HConstants.METADATA).append(" => ");
s.append("{");
boolean printCommaForCfg = false;
for (ImmutableBytesWritable k : userKeys) {
String key = Bytes.toString(k.get());
String value = Bytes.toString(values.get(k).get());
if (printCommaForCfg) s.append(", ");
printCommaForCfg = true;
s.append('\'').append(key).append('\'');
s.append(" => ");
s.append('\'').append(value).append('\'');
}
s.append("}");
}
}
s.append("}}"); // end METHOD
// step 3: printing all configuration:
if (!configuration.isEmpty()) {
if (hasAttributes) {
s.append(", ");
}
s.append(HConstants.CONFIGURATION).append(" => ");
s.append('{');
boolean printCommaForConfig = false;
for (Map.Entry<String, String> e : configuration.entrySet()) {
if (printCommaForConfig) s.append(", ");
printCommaForConfig = true;
s.append('\'').append(e.getKey()).append('\'');
s.append(" => ");
s.append('\'').append(e.getValue()).append('\'');
}
s.append("}");
}
s.append("}"); // end METHOD
return s;
}
public static Map<String, String> getDefaultValues() {
return Collections.unmodifiableMap(DEFAULT_VALUES);
}
/**
* Compare the contents of the descriptor with another one passed as a parameter.
* Checks if the obj passed is an instance of HTableDescriptor, if yes then the
@@ -860,6 +888,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
}
}
result ^= values.hashCode();
result ^= configuration.hashCode();
return result;
}
@@ -880,13 +909,14 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
setRootRegion(in.readBoolean());
setMetaRegion(in.readBoolean());
values.clear();
configuration.clear();
int numVals = in.readInt();
for (int i = 0; i < numVals; i++) {
ImmutableBytesWritable key = new ImmutableBytesWritable();
ImmutableBytesWritable value = new ImmutableBytesWritable();
key.readFields(in);
value.readFields(in);
values.put(key, value);
setValue(key, value);
}
families.clear();
int numFamilies = in.readInt();
@@ -895,8 +925,17 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
c.readFields(in);
families.put(c.getName(), c);
}
if (version < 4) {
return;
if (version >= 7) {
int numConfigs = in.readInt();
for (int i = 0; i < numConfigs; i++) {
ImmutableBytesWritable key = new ImmutableBytesWritable();
ImmutableBytesWritable value = new ImmutableBytesWritable();
key.readFields(in);
value.readFields(in);
configuration.put(
Bytes.toString(key.get(), key.getOffset(), key.getLength()),
Bytes.toString(value.get(), value.getOffset(), value.getLength()));
}
}
}
@@ -925,6 +964,11 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
HColumnDescriptor family = it.next();
family.write(out);
}
out.writeInt(configuration.size());
for (Map.Entry<String, String> e : configuration.entrySet()) {
new ImmutableBytesWritable(Bytes.toBytes(e.getKey())).write(out);
new ImmutableBytesWritable(Bytes.toBytes(e.getValue())).write(out);
}
}
// Comparable
@@ -963,6 +1007,13 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
else if (result > 0)
result = 1;
}
if (result == 0) {
result = this.configuration.hashCode() - other.configuration.hashCode();
if (result < 0)
result = -1;
else if (result > 0)
result = 1;
}
return result;
}
@@ -1170,7 +1221,7 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
}
// if we found a match, remove it
if (match != null)
this.values.remove(match);
remove(match);
}
/**
@@ -1218,9 +1269,9 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
@Deprecated
public void setOwnerString(String ownerString) {
if (ownerString != null) {
setValue(OWNER_KEY, Bytes.toBytes(ownerString));
setValue(OWNER_KEY, ownerString);
} else {
values.remove(OWNER_KEY);
remove(OWNER_KEY);
}
}
@@ -1281,6 +1332,12 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
for (HColumnDescriptor hcd: getColumnFamilies()) {
builder.addColumnFamilies(hcd.convert());
}
for (Map.Entry<String, String> e : this.configuration.entrySet()) {
NameStringPair.Builder aBuilder = NameStringPair.newBuilder();
aBuilder.setName(e.getKey());
aBuilder.setValue(e.getValue());
builder.addConfiguration(aBuilder.build());
}
return builder.build();
}
@@ -1299,6 +1356,44 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
for (BytesBytesPair a: ts.getAttributesList()) {
htd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
}
for (NameStringPair a: ts.getConfigurationList()) {
htd.setConfiguration(a.getName(), a.getValue());
}
return htd;
}
/**
* Getter for accessing the configuration value by key
*/
public String getConfigurationValue(String key) {
return configuration.get(key);
}
/**
* Getter for fetching an unmodifiable {@link #configuration} map.
*/
public Map<String, String> getConfiguration() {
// shallow pointer copy
return Collections.unmodifiableMap(configuration);
}
/**
* Setter for storing a configuration setting in {@link #configuration} map.
* @param key Config key. Same as XML config key e.g. hbase.something.or.other.
* @param value String value. If null, removes the setting.
*/
public void setConfiguration(String key, String value) {
if (value == null) {
removeConfiguration(key);
} else {
configuration.put(key, value);
}
}
/**
* Remove a config setting represented by the key from the {@link #configuration} map
*/
public void removeConfiguration(final String key) {
configuration.remove(key);
}
}
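HTableDescriptor mirrors the column-family API at table scope, and getConfiguration() hands back an unmodifiable view, so all mutation goes through setConfiguration and removeConfiguration. A short sketch (property name again illustrative):

import java.util.Map;
import org.apache.hadoop.hbase.HTableDescriptor;

public class TableConfigSketch {
  public static void main(String[] args) {
    HTableDescriptor htd = new HTableDescriptor("t1");
    htd.setConfiguration("hbase.hstore.compaction.min", "4");  // table-wide override
    Map<String, String> view = htd.getConfiguration();
    System.out.println(view.get("hbase.hstore.compaction.min"));  // 4
    // view.put(...) would throw UnsupportedOperationException: read-only view.
    htd.removeConfiguration("hbase.hstore.compaction.min");
    System.out.println(htd.getConfigurationValue("hbase.hstore.compaction.min"));  // null
  }
}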


@@ -133,7 +133,7 @@ public final class Constraints {
}
// now remove all the keys we found
for (ImmutableBytesWritable key : keys) {
desc.remove(key.get());
desc.remove(key);
}
}


@@ -440,6 +440,7 @@ public class HRegion implements HeapSize { // , Writable{
this.baseConf = confParam;
this.conf = new CompoundConfiguration()
.add(confParam)
.addStringMap(htd.getConfiguration())
.addWritableMap(htd.getValues());
this.rowLockWaitDuration = conf.getInt("hbase.rowlock.wait.duration",
DEFAULT_ROWLOCK_WAIT_DURATION);


@@ -184,8 +184,12 @@ public class HStore implements Store, StoreConfiguration {
this.region = region;
this.family = family;
// 'conf' renamed to 'confParam' b/c we use this.conf in the constructor
// CompoundConfiguration will look for keys in reverse order of addition, so we'd
// add global config first, then table and cf overrides, then cf metadata.
this.conf = new CompoundConfiguration()
.add(confParam)
.addStringMap(region.getTableDesc().getConfiguration())
.addStringMap(family.getConfiguration())
.addWritableMap(family.getValues());
this.blocksize = family.getBlocksize();
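The ordering comment above is the heart of the feature: CompoundConfiguration answers lookups from the most recently added source first, so a column-family entry shadows the table entry, which in turn shadows the site/XML value. A sketch of that layering, assuming CompoundConfiguration is visible to caller code at this revision (here it lives in the regionserver package) and using an arbitrary property:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.regionserver.CompoundConfiguration;  // assumed package

public class ConfigPrecedenceSketch {
  public static void main(String[] args) {
    Configuration global = HBaseConfiguration.create();
    global.setInt("hbase.hstore.compaction.min", 3);           // cluster-wide default

    HTableDescriptor htd = new HTableDescriptor("t1");
    htd.setConfiguration("hbase.hstore.compaction.min", "4");  // table override

    HColumnDescriptor hcd = new HColumnDescriptor("cf1");
    hcd.setConfiguration("hbase.hstore.compaction.min", "5");  // CF override, wins

    // Same layering HStore builds above; later additions win on lookup.
    Configuration conf = new CompoundConfiguration()
        .add(global)
        .addStringMap(htd.getConfiguration())
        .addStringMap(hcd.getConfiguration())
        .addWritableMap(hcd.getValues());

    System.out.println(conf.getInt("hbase.hstore.compaction.min", -1));  // prints 5
  }
}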


@@ -56,6 +56,7 @@ module HBaseConstants
SPLITS_FILE = 'SPLITS_FILE'
SPLITALGO = 'SPLITALGO'
NUMREGIONS = 'NUMREGIONS'
CONFIGURATION = org.apache.hadoop.hbase.HConstants::CONFIGURATION
# Load constants from hbase java API
def self.promote_constants(constants)


@@ -260,6 +260,7 @@ module Hbase
htd.setMemStoreFlushSize(JLong.valueOf(arg.delete(MEMSTORE_FLUSHSIZE))) if arg[MEMSTORE_FLUSHSIZE]
htd.setDeferredLogFlush(JBoolean.valueOf(arg.delete(DEFERRED_LOG_FLUSH))) if arg[DEFERRED_LOG_FLUSH]
set_user_metadata(htd, arg.delete(METADATA)) if arg[METADATA]
set_descriptor_config(htd, arg.delete(CONFIGURATION)) if arg[CONFIGURATION]
arg.each_key do |ignored_key|
puts("An argument ignored (unknown or overridden): %s" % [ ignored_key ])
@@ -420,7 +421,7 @@ module Hbase
if (htd.getValue(name) == nil)
raise ArgumentError, "Can not find attribute: #{name}"
end
htd.remove(name.to_java_bytes)
htd.remove(name)
@admin.modifyTable(table_name.to_java_bytes, htd)
# Unknown method
else
@@ -446,11 +447,12 @@ module Hbase
# 3) Some args for the table, optionally with METHOD => table_att (deprecated)
raise(ArgumentError, "NAME argument in an unexpected place") if name
htd.setOwnerString(arg.delete(OWNER)) if arg[OWNER]
set_user_metadata(htd, arg.delete(METADATA)) if arg[METADATA]
htd.setMaxFileSize(JLong.valueOf(arg.delete(MAX_FILESIZE))) if arg[MAX_FILESIZE]
htd.setReadOnly(JBoolean.valueOf(arg.delete(READONLY))) if arg[READONLY]
htd.setMemStoreFlushSize(JLong.valueOf(arg.delete(MEMSTORE_FLUSHSIZE))) if arg[MEMSTORE_FLUSHSIZE]
htd.setDeferredLogFlush(JBoolean.valueOf(arg.delete(DEFERRED_LOG_FLUSH))) if arg[DEFERRED_LOG_FLUSH]
set_user_metadata(htd, arg.delete(METADATA)) if arg[METADATA]
set_descriptor_config(htd, arg.delete(CONFIGURATION)) if arg[CONFIGURATION]
# set a coprocessor attribute
valid_coproc_keys = []
@@ -613,6 +615,7 @@ module Hbase
end
set_user_metadata(family, arg.delete(METADATA)) if arg[METADATA]
set_descriptor_config(family, arg.delete(CONFIGURATION)) if arg[CONFIGURATION]
arg.each_key do |unknown_key|
puts("Unknown argument ignored for column family %s: %s" % [name, unknown_key])
@ -652,5 +655,14 @@ module Hbase
end
end
# Apply config specific to a table/column to its descriptor
def set_descriptor_config(descriptor, config)
raise(ArgumentError, "#{CONFIGURATION} must be a Hash type") unless config.kind_of?(Hash)
for k,v in config
v = v.to_s unless v.nil?
descriptor.setConfiguration(k, v)
end
end
end
end
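With these hooks in place, the shell can attach overrides anywhere column or table attributes are accepted; an illustrative invocation (not taken verbatim from this patch) would be create 't1', {NAME => 'cf1', CONFIGURATION => {'hbase.hstore.compaction.min' => '5'}}, and the same CONFIGURATION key works in the alter forms handled above. Note that set_descriptor_config stringifies non-nil values with to_s before calling setConfiguration.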


@@ -79,4 +79,18 @@ public class TestHColumnDescriptor {
assertEquals("Family name can not be empty", e.getLocalizedMessage());
}
}
/**
* Test that we add and remove strings from configuration properly.
*/
@Test
public void testAddGetRemoveConfiguration() throws Exception {
HColumnDescriptor desc = new HColumnDescriptor("foo");
String key = "Some";
String value = "value";
desc.setConfiguration(key, value);
assertEquals(value, desc.getConfigurationValue(key));
desc.removeConfiguration(key);
assertEquals(null, desc.getConfigurationValue(key));
}
}


@@ -100,4 +100,18 @@ public class TestHTableDescriptor {
desc.setMemStoreFlushSize(1111L);
assertEquals(1111L, desc.getMemStoreFlushSize());
}
/**
* Test that we add and remove strings from configuration properly.
*/
@Test
public void testAddGetRemoveConfiguration() throws Exception {
HTableDescriptor desc = new HTableDescriptor("table");
String key = "Some";
String value = "value";
desc.setConfiguration(key, value);
assertEquals(value, desc.getConfigurationValue(key));
desc.removeConfiguration(key);
assertEquals(null, desc.getConfigurationValue(key));
}
}


@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
@@ -137,6 +138,12 @@ public class TestStore extends TestCase {
private void init(String methodName, Configuration conf,
HColumnDescriptor hcd) throws IOException {
HTableDescriptor htd = new HTableDescriptor(table);
init(methodName, conf, htd, hcd);
}
private void init(String methodName, Configuration conf, HTableDescriptor htd,
HColumnDescriptor hcd) throws IOException {
//Setting up a Store
Path basedir = new Path(DIR+methodName);
String logName = "logs";
@@ -146,7 +153,6 @@ public class TestStore extends TestCase {
fs.delete(logdir, true);
HTableDescriptor htd = new HTableDescriptor(table);
htd.addFamily(hcd);
HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
HLog hlog = HLogFactory.createHLog(fs, basedir, logName, conf);
@@ -817,5 +823,34 @@ public class TestStore extends TestCase {
store.getHRegion().clearSplit_TESTS_ONLY();
}
public void testStoreUsesConfigurationFromHcdAndHtd() throws Exception {
final String CONFIG_KEY = "hbase.regionserver.thread.compaction.throttle";
long anyValue = 10;
// We'll check that it uses correct config and propagates it appropriately by going thru
// the simplest "real" path I can find - "throttleCompaction", which just checks whether
// a number we pass in is higher than some config value, inside compactionPolicy.
Configuration conf = HBaseConfiguration.create();
conf.setLong(CONFIG_KEY, anyValue);
init(getName() + "-xml", conf);
assertTrue(store.throttleCompaction(anyValue + 1));
assertFalse(store.throttleCompaction(anyValue));
// HTD overrides XML.
--anyValue;
HTableDescriptor htd = new HTableDescriptor(table);
HColumnDescriptor hcd = new HColumnDescriptor(family);
htd.setConfiguration(CONFIG_KEY, Long.toString(anyValue));
init(getName() + "-htd", conf, htd, hcd);
assertTrue(store.throttleCompaction(anyValue + 1));
assertFalse(store.throttleCompaction(anyValue));
// HCD overrides them both.
--anyValue;
hcd.setConfiguration(CONFIG_KEY, Long.toString(anyValue));
init(getName() + "-hcd", conf, htd, hcd);
assertTrue(store.throttleCompaction(anyValue + 1));
assertFalse(store.throttleCompaction(anyValue));
}
}