HBASE-25519 BLOCKSIZE needs to support pretty print (#2894)

Signed-off-by: stack <stack@apache.org>
This commit is contained in:
Baiqiang Zhao 2021-02-09 05:22:45 +08:00 committed by GitHub
parent ce9c9b453f
commit ca672aceee
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 48 additions and 5 deletions

View File

@@ -312,6 +312,8 @@ public class ColumnFamilyDescriptorBuilder {
switch (key) {
case TTL:
return Unit.TIME_INTERVAL;
case BLOCKSIZE:
return Unit.BYTE;
default:
return Unit.NONE;
}
@@ -417,6 +419,11 @@ public class ColumnFamilyDescriptorBuilder {
return this;
}
/**
 * Sets the block size from a human-readable string such as "64KB" or "131072".
 *
 * @param value the block size, either a plain byte count or a pretty-printed size
 * @return this builder, for chaining
 * @throws HBaseException if the string cannot be parsed as a byte size
 */
public ColumnFamilyDescriptorBuilder setBlocksize(String value) throws HBaseException {
  this.desc.setBlocksize(value);
  return this;
}
public ColumnFamilyDescriptorBuilder setBloomFilterType(final BloomType value) {
desc.setBloomFilterType(value);
return this;
@@ -769,6 +776,11 @@ public class ColumnFamilyDescriptorBuilder {
return setValue(BLOCKSIZE_BYTES, Integer.toString(s));
}
/**
 * Sets the block size from a pretty-printed string (e.g. "64KB 512B") or a plain
 * byte count (e.g. "66048").
 *
 * @param blocksize human-readable block size, interpreted with {@code PrettyPrinter.Unit.BYTE}
 * @return this descriptor, for chaining
 * @throws HBaseException if the value cannot be converted to a byte count
 */
public ModifyableColumnFamilyDescriptor setBlocksize(String blocksize) throws HBaseException {
  // Normalize the pretty-printed form to a plain byte count before parsing.
  String bytes = PrettyPrinter.valueOf(blocksize, PrettyPrinter.Unit.BYTE);
  return setBlocksize(Integer.parseInt(bytes));
}
@Override
public Compression.Algorithm getCompressionType() {
return getStringOrDefault(COMPRESSION_BYTES,

View File

@@ -184,6 +184,34 @@ public class TestColumnFamilyDescriptorBuilder {
Assert.assertEquals(43282800, builder.build().getTimeToLive());
}
@Test
public void testSetBlocksize() throws HBaseException {
  ColumnFamilyDescriptorBuilder builder =
    ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("foo"));

  // A plain byte count is accepted as-is.
  builder.setBlocksize("131072");
  assertEquals(131072, builder.build().getBlocksize());

  // Pretty-printed sizes are converted to bytes.
  builder.setBlocksize("100KB");
  assertEquals(102400, builder.build().getBlocksize());

  builder.setBlocksize("1MB");
  assertEquals(1048576, builder.build().getBlocksize());

  // Units are case-insensitive and may be combined.
  builder.setBlocksize("64kb 512B");
  assertEquals(66048, builder.build().getBlocksize());

  // The round-trip form emitted by the pretty printer itself also parses.
  builder.setBlocksize("66048 B (64KB 512B)");
  assertEquals(66048, builder.build().getBlocksize());
}
/**
* Test for verifying the ColumnFamilyDescriptorBuilder's default values so that backward
compatibility with hbase-1.x can be maintained (see HBASE-24981).

View File

@@ -339,13 +339,15 @@ public class TestTableDescriptorBuilder {
public void testStringCustomizedValues() throws HBaseException {
byte[] familyName = Bytes.toBytes("cf");
ColumnFamilyDescriptor hcd =
ColumnFamilyDescriptorBuilder.newBuilder(familyName).setBlocksize(1000).build();
TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
ColumnFamilyDescriptorBuilder.newBuilder(familyName).setBlocksize(131072).build();
TableDescriptor htd = TableDescriptorBuilder
.newBuilder(TableName.valueOf(name.getMethodName()))
.setColumnFamily(hcd).setDurability(Durability.ASYNC_WAL).build();
assertEquals(
"'testStringCustomizedValues', " +
"{TABLE_ATTRIBUTES => {DURABILITY => 'ASYNC_WAL'}}, {NAME => 'cf', BLOCKSIZE => '1000'}",
"{TABLE_ATTRIBUTES => {DURABILITY => 'ASYNC_WAL'}}, "
+ "{NAME => 'cf', BLOCKSIZE => '131072 B (128KB)'}",
htd.toStringCustomizedValues());
htd = TableDescriptorBuilder.newBuilder(htd)
@@ -356,7 +358,8 @@ public class TestTableDescriptorBuilder {
"'testStringCustomizedValues', " +
"{TABLE_ATTRIBUTES => {DURABILITY => 'ASYNC_WAL', "
+ "MAX_FILESIZE => '10737942528 B (10GB 512KB)', "
+ "MEMSTORE_FLUSHSIZE => '268435456 B (256MB)'}}, {NAME => 'cf', BLOCKSIZE => '1000'}",
+ "MEMSTORE_FLUSHSIZE => '268435456 B (256MB)'}}, "
+ "{NAME => 'cf', BLOCKSIZE => '131072 B (128KB)'}",
htd.toStringCustomizedValues());
}

View File

@@ -1108,7 +1108,7 @@ module Hbase
end
cfdb.setTimeToLive(arg.delete(ColumnFamilyDescriptorBuilder::TTL)) if arg.include?(ColumnFamilyDescriptorBuilder::TTL)
cfdb.setDataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::DATA_BLOCK_ENCODING))) if arg.include?(ColumnFamilyDescriptorBuilder::DATA_BLOCK_ENCODING)
cfdb.setBlocksize(JInteger.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::BLOCKSIZE))) if arg.include?(ColumnFamilyDescriptorBuilder::BLOCKSIZE)
cfdb.setBlocksize(arg.delete(ColumnFamilyDescriptorBuilder::BLOCKSIZE)) if arg.include?(ColumnFamilyDescriptorBuilder::BLOCKSIZE)
cfdb.setMaxVersions(JInteger.valueOf(arg.delete(HConstants::VERSIONS))) if arg.include?(HConstants::VERSIONS)
cfdb.setMinVersions(JInteger.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::MIN_VERSIONS))) if arg.include?(ColumnFamilyDescriptorBuilder::MIN_VERSIONS)
cfdb.setKeepDeletedCells(org.apache.hadoop.hbase.KeepDeletedCells.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::KEEP_DELETED_CELLS).to_s.upcase)) if arg.include?(ColumnFamilyDescriptorBuilder::KEEP_DELETED_CELLS)