HBASE-25519 BLOCKSIZE needs to support pretty print (#2944)

This commit is contained in:
Baiqiang Zhao 2021-02-09 23:27:27 +08:00 committed by GitHub
parent b05dcac9fd
commit 9a6bd5b6f1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 51 additions and 4 deletions

View File

@ -313,6 +313,11 @@ public class HColumnDescriptor implements ColumnFamilyDescriptor, Comparable<HCo
return this; return this;
} }
/**
 * Sets the block size from a human-readable string (e.g. "64KB", "131072"),
 * delegating to the underlying mutable descriptor.
 * @param value block size as a pretty-printed byte quantity
 * @return this descriptor, for call chaining
 * @throws HBaseException if the string cannot be parsed as a byte size
 */
public HColumnDescriptor setBlocksize(String value) throws HBaseException {
getDelegateeForModification().setBlocksize(value);
return this;
}
@Override @Override
public Compression.Algorithm getCompressionType() { public Compression.Algorithm getCompressionType() {
return delegatee.getCompressionType(); return delegatee.getCompressionType();

View File

@ -312,6 +312,8 @@ public class ColumnFamilyDescriptorBuilder {
switch (key) { switch (key) {
case TTL: case TTL:
return Unit.TIME_INTERVAL; return Unit.TIME_INTERVAL;
case BLOCKSIZE:
return Unit.BYTE;
default: default:
return Unit.NONE; return Unit.NONE;
} }
@ -417,6 +419,11 @@ public class ColumnFamilyDescriptorBuilder {
return this; return this;
} }
/**
 * Sets the block size from a human-readable string (e.g. "64KB", "131072")
 * on the descriptor being built.
 * @param value block size as a pretty-printed byte quantity
 * @return this builder, for call chaining
 * @throws HBaseException if the string cannot be parsed as a byte size
 */
public ColumnFamilyDescriptorBuilder setBlocksize(String value) throws HBaseException {
desc.setBlocksize(value);
return this;
}
public ColumnFamilyDescriptorBuilder setBloomFilterType(final BloomType value) { public ColumnFamilyDescriptorBuilder setBloomFilterType(final BloomType value) {
desc.setBloomFilterType(value); desc.setBloomFilterType(value);
return this; return this;
@ -780,6 +787,11 @@ public class ColumnFamilyDescriptorBuilder {
return setValue(BLOCKSIZE_BYTES, Integer.toString(s)); return setValue(BLOCKSIZE_BYTES, Integer.toString(s));
} }
/**
 * Sets the block size from a pretty-printed string such as "64KB" or
 * "131072", converting it to bytes before storing it.
 * @param blocksize block size as a pretty-printed byte quantity
 * @return this descriptor, for call chaining
 * @throws HBaseException if the string cannot be parsed as a byte size
 */
public ModifyableColumnFamilyDescriptor setBlocksize(String blocksize) throws HBaseException {
  // Normalize the human-readable form to a plain byte count first.
  String bytes = PrettyPrinter.valueOf(blocksize, PrettyPrinter.Unit.BYTE);
  return setBlocksize(Integer.parseInt(bytes));
}
@Override @Override
public Compression.Algorithm getCompressionType() { public Compression.Algorithm getCompressionType() {
return getStringOrDefault(COMPRESSION_BYTES, return getStringOrDefault(COMPRESSION_BYTES,

View File

@ -196,6 +196,34 @@ public class TestColumnFamilyDescriptorBuilder {
Assert.assertEquals(43282800, builder.build().getTimeToLive()); Assert.assertEquals(43282800, builder.build().getTimeToLive());
} }
@Test
public void testSetBlocksize() throws HBaseException {
  ColumnFamilyDescriptorBuilder builder =
    ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("foo"));
  // Table-driven: each pretty-printed input paired with its expected byte
  // count. Covers a plain number, KB/MB units, mixed-case units, and the
  // fully pretty-printed round-trip form.
  String[] inputs =
    {"131072", "100KB", "1MB", "64kb 512B", "66048 B (64KB 512B)"};
  int[] expected = {131072, 102400, 1048576, 66048, 66048};
  for (int i = 0; i < inputs.length; i++) {
    builder.setBlocksize(inputs[i]);
    assertEquals(expected[i], builder.build().getBlocksize());
  }
}
/** /**
* Test for verifying the ColumnFamilyDescriptorBuilder's default values so that backward * Test for verifying the ColumnFamilyDescriptorBuilder's default values so that backward
* compatibility with hbase-1.x can be maintained (see HBASE-24981). * compatibility with hbase-1.x can be maintained (see HBASE-24981).

View File

@ -371,7 +371,7 @@ public class TestTableDescriptorBuilder {
public void testStringCustomizedValues() throws HBaseException { public void testStringCustomizedValues() throws HBaseException {
byte[] familyName = Bytes.toBytes("cf"); byte[] familyName = Bytes.toBytes("cf");
ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.newBuilder(familyName) ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.newBuilder(familyName)
.setBlocksize(1000) .setBlocksize(131072)
.build(); .build();
TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())) TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
.setColumnFamily(hcd) .setColumnFamily(hcd)
@ -380,7 +380,8 @@ public class TestTableDescriptorBuilder {
assertEquals( assertEquals(
"'testStringCustomizedValues', " + "'testStringCustomizedValues', " +
"{TABLE_ATTRIBUTES => {DURABILITY => 'ASYNC_WAL'}}, {NAME => 'cf', BLOCKSIZE => '1000'}", "{TABLE_ATTRIBUTES => {DURABILITY => 'ASYNC_WAL'}}, "
+ "{NAME => 'cf', BLOCKSIZE => '131072 B (128KB)'}",
htd.toStringCustomizedValues()); htd.toStringCustomizedValues());
htd = TableDescriptorBuilder.newBuilder(htd) htd = TableDescriptorBuilder.newBuilder(htd)
@ -391,7 +392,8 @@ public class TestTableDescriptorBuilder {
"'testStringCustomizedValues', " + "'testStringCustomizedValues', " +
"{TABLE_ATTRIBUTES => {DURABILITY => 'ASYNC_WAL', " "{TABLE_ATTRIBUTES => {DURABILITY => 'ASYNC_WAL', "
+ "MAX_FILESIZE => '10737942528 B (10GB 512KB)', " + "MAX_FILESIZE => '10737942528 B (10GB 512KB)', "
+ "MEMSTORE_FLUSHSIZE => '268435456 B (256MB)'}}, {NAME => 'cf', BLOCKSIZE => '1000'}", + "MEMSTORE_FLUSHSIZE => '268435456 B (256MB)'}}, "
+ "{NAME => 'cf', BLOCKSIZE => '131072 B (128KB)'}",
htd.toStringCustomizedValues()); htd.toStringCustomizedValues());
} }

View File

@ -1025,7 +1025,7 @@ module Hbase
end end
family.setTimeToLive(arg.delete(ColumnFamilyDescriptorBuilder::TTL)) if arg.include?(ColumnFamilyDescriptorBuilder::TTL) family.setTimeToLive(arg.delete(ColumnFamilyDescriptorBuilder::TTL)) if arg.include?(ColumnFamilyDescriptorBuilder::TTL)
family.setDataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::DATA_BLOCK_ENCODING))) if arg.include?(ColumnFamilyDescriptorBuilder::DATA_BLOCK_ENCODING) family.setDataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::DATA_BLOCK_ENCODING))) if arg.include?(ColumnFamilyDescriptorBuilder::DATA_BLOCK_ENCODING)
family.setBlocksize(JInteger.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::BLOCKSIZE))) if arg.include?(ColumnFamilyDescriptorBuilder::BLOCKSIZE) family.setBlocksize(arg.delete(ColumnFamilyDescriptorBuilder::BLOCKSIZE)) if arg.include?(ColumnFamilyDescriptorBuilder::BLOCKSIZE)
family.setMaxVersions(JInteger.valueOf(arg.delete(HConstants::VERSIONS))) if arg.include?(HConstants::VERSIONS) family.setMaxVersions(JInteger.valueOf(arg.delete(HConstants::VERSIONS))) if arg.include?(HConstants::VERSIONS)
family.setMinVersions(JInteger.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::MIN_VERSIONS))) if arg.include?(ColumnFamilyDescriptorBuilder::MIN_VERSIONS) family.setMinVersions(JInteger.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::MIN_VERSIONS))) if arg.include?(ColumnFamilyDescriptorBuilder::MIN_VERSIONS)
family.setKeepDeletedCells(org.apache.hadoop.hbase.KeepDeletedCells.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::KEEP_DELETED_CELLS).to_s.upcase)) if arg.include?(ColumnFamilyDescriptorBuilder::KEEP_DELETED_CELLS) family.setKeepDeletedCells(org.apache.hadoop.hbase.KeepDeletedCells.valueOf(arg.delete(ColumnFamilyDescriptorBuilder::KEEP_DELETED_CELLS).to_s.upcase)) if arg.include?(ColumnFamilyDescriptorBuilder::KEEP_DELETED_CELLS)