HBASE-7569 change HTD/HCD to use standard byte-byte pair for metadata

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1433289 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2013-01-15 05:07:56 +00:00
parent 238bcb34da
commit a380af2543
4 changed files with 144 additions and 1094 deletions
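In short, HTD and HCD are HTableDescriptor and HColumnDescriptor: instead of each protobuf schema message carrying its own nested Attribute type, their metadata attributes are now serialized with the shared HBaseProtos.BytesBytesPair message, where first holds the attribute name and second its value. A minimal sketch of building and reading one such pair (key and value chosen purely for illustration):

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
import org.apache.hadoop.hbase.util.Bytes;

public class BytesBytesPairExample {
  public static void main(String[] args) {
    // One attribute as the shared pair type: "first" is the attribute
    // name, "second" is the attribute value, both raw bytes.
    BytesBytesPair attr = BytesBytesPair.newBuilder()
        .setFirst(ByteString.copyFrom(Bytes.toBytes("MAX_FILESIZE")))  // illustrative key
        .setSecond(ByteString.copyFrom(Bytes.toBytes("1073741824")))   // illustrative value
        .build();
    System.out.println(Bytes.toString(attr.getFirst().toByteArray())
        + " = " + Bytes.toString(attr.getSecond().toByteArray()));
  }
}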


@@ -29,11 +29,7 @@ option optimize_for = SPEED;
*/
message TableSchema {
optional bytes name = 1;
message Attribute {
required bytes name = 1;
required bytes value = 2;
}
repeated Attribute attributes = 2;
repeated BytesBytesPair attributes = 2;
repeated ColumnFamilySchema columnFamilies = 3;
}
@@ -43,11 +39,7 @@ message TableSchema {
*/
message ColumnFamilySchema {
required bytes name = 1;
message Attribute {
required bytes name = 1;
required bytes value = 2;
}
repeated Attribute attributes = 2;
repeated BytesBytesPair attributes = 2;
}
/**


@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;
@@ -58,14 +59,15 @@ import com.google.protobuf.InvalidProtocolBufferException;
public class HColumnDescriptor implements WritableComparable<HColumnDescriptor> {
// For future backward compatibility
// Version 3 was when column names become byte arrays and when we picked up
// Time-to-live feature. Version 4 was when we moved to byte arrays, HBASE-82.
// Version 5 was when bloom filter descriptors were removed.
// Version 6 adds metadata as a map where keys and values are byte[].
// Version 7 -- add new compression and hfile blocksize to HColumnDescriptor (HBASE-1217)
// Version 8 -- reintroduction of bloom filters, changed from boolean to enum
// Version 9 -- add data block encoding
private static final byte COLUMN_DESCRIPTOR_VERSION = (byte) 9;
// Version 5 was when bloom filter descriptors were removed.
// Version 6 adds metadata as a map where keys and values are byte[].
// Version 7 -- add new compression and hfile blocksize to HColumnDescriptor (HBASE-1217)
// Version 8 -- reintroduction of bloom filters, changed from boolean to enum
// Version 9 -- add data block encoding
// Version 10 -- change metadata to standard type.
private static final byte COLUMN_DESCRIPTOR_VERSION = (byte) 10;
// These constants are used as FileInfo keys
public static final String COMPRESSION = "COMPRESSION";
@@ -1120,8 +1122,8 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
// unrelated-looking test failures that are hard to trace back to here.
HColumnDescriptor hcd = new HColumnDescriptor();
hcd.name = cfs.getName().toByteArray();
for (ColumnFamilySchema.Attribute a: cfs.getAttributesList()) {
hcd.setValue(a.getName().toByteArray(), a.getValue().toByteArray());
for (BytesBytesPair a: cfs.getAttributesList()) {
hcd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
}
return hcd;
}
@@ -1133,9 +1135,9 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
builder.setName(ByteString.copyFrom(getName()));
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e: this.values.entrySet()) {
ColumnFamilySchema.Attribute.Builder aBuilder = ColumnFamilySchema.Attribute.newBuilder();
aBuilder.setName(ByteString.copyFrom(e.getKey().get()));
aBuilder.setValue(ByteString.copyFrom(e.getValue().get()));
BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
aBuilder.setFirst(ByteString.copyFrom(e.getKey().get()));
aBuilder.setSecond(ByteString.copyFrom(e.getValue().get()));
builder.addAttributes(aBuilder.build());
}
return builder.build();
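A usage sketch for the HColumnDescriptor change above, assuming the builder code in the last hunk lives in an instance convert() method and that it pairs with the static HColumnDescriptor.convert(cfs) called from HTableDescriptor further down; the family name and attribute key/value are illustrative:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
import org.apache.hadoop.hbase.util.Bytes;

public class ColumnFamilyRoundTrip {
  public static void main(String[] args) {
    HColumnDescriptor hcd = new HColumnDescriptor("cf");                  // illustrative family
    hcd.setValue(Bytes.toBytes("SOME_KEY"), Bytes.toBytes("some-value")); // illustrative metadata

    // To protobuf: each entry of the values map becomes a BytesBytesPair attribute.
    ColumnFamilySchema cfs = hcd.convert();

    // And back: the static convert() copies first/second into the descriptor's value map.
    HColumnDescriptor copy = HColumnDescriptor.convert(cfs);
    System.out.println(Bytes.toString(copy.getValue(Bytes.toBytes("SOME_KEY"))));
  }
}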


@@ -38,6 +38,7 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
import org.apache.hadoop.hbase.security.User;
@@ -63,8 +64,9 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
* Version 3 adds metadata as a map where keys and values are byte[].
* Version 4 adds indexes
* Version 5 removed transactional pollution -- e.g. indexes
* Version 6 changed metadata to BytesBytesPair in PB
*/
private static final byte TABLE_DESCRIPTOR_VERSION = 5;
private static final byte TABLE_DESCRIPTOR_VERSION = 6;
private byte [] name = HConstants.EMPTY_BYTE_ARRAY;
@@ -1271,9 +1273,9 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
TableSchema.Builder builder = TableSchema.newBuilder();
builder.setName(ByteString.copyFrom(getName()));
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e: this.values.entrySet()) {
TableSchema.Attribute.Builder aBuilder = TableSchema.Attribute.newBuilder();
aBuilder.setName(ByteString.copyFrom(e.getKey().get()));
aBuilder.setValue(ByteString.copyFrom(e.getValue().get()));
BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
aBuilder.setFirst(ByteString.copyFrom(e.getKey().get()));
aBuilder.setSecond(ByteString.copyFrom(e.getValue().get()));
builder.addAttributes(aBuilder.build());
}
for (HColumnDescriptor hcd: getColumnFamilies()) {
@@ -1294,8 +1296,8 @@ public class HTableDescriptor implements WritableComparable<HTableDescriptor> {
hcds[index++] = HColumnDescriptor.convert(cfs);
}
HTableDescriptor htd = new HTableDescriptor(ts.getName().toByteArray(), hcds);
for (TableSchema.Attribute a: ts.getAttributesList()) {
htd.setValue(a.getName().toByteArray(), a.getValue().toByteArray());
for (BytesBytesPair a: ts.getAttributesList()) {
htd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
}
return htd;
}
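The same round trip for HTableDescriptor, this time through wire bytes; the static conversion whose body appears above is assumed to be exposed as HTableDescriptor.convert(TableSchema), and the instance convert() returning TableSchema is assumed to hold the builder code from the earlier hunk. Names and values are illustrative:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
import org.apache.hadoop.hbase.util.Bytes;

public class TableSchemaRoundTrip {
  public static void main(String[] args) throws Exception {
    HTableDescriptor htd = new HTableDescriptor(Bytes.toBytes("demo_table")); // illustrative table
    htd.addFamily(new HColumnDescriptor("cf"));
    htd.setValue(Bytes.toBytes("SOME_KEY"), Bytes.toBytes("some-value"));     // illustrative metadata

    // Table-level values now serialize as repeated BytesBytesPair attributes.
    TableSchema ts = htd.convert();
    byte[] wire = ts.toByteArray();

    // Parse the protobuf bytes and rebuild the descriptor.
    HTableDescriptor copy = HTableDescriptor.convert(TableSchema.parseFrom(wire));
    System.out.println(Bytes.toString(copy.getValue(Bytes.toBytes("SOME_KEY"))));
  }
}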