HBASE-18419 Update IntegrationTestIngestWithMOB and Actions to use ColumnFamily builders for modification

Signed-off-by: Chia-Ping Tsai <chia7712@gmail.com>
Mike Drob 2017-07-23 14:50:56 +08:00 committed by Chia-Ping Tsai
parent 254b78c116
commit 79b61a0546
7 changed files with 113 additions and 149 deletions
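The common thread in all seven files is a switch from mutating HTableDescriptor/HColumnDescriptor objects in place to deriving new, immutable descriptors through TableDescriptorBuilder and ColumnFamilyDescriptorBuilder. A rough before/after sketch of that pattern, assuming an open Admin and a TableName are already in scope (the max-versions setting is just an illustrative attribute, not something this commit changes):

// Old pattern (removed by this commit): fetch descriptors and mutate them in place.
HTableDescriptor tableDesc = admin.getTableDescriptor(tableName);
for (HColumnDescriptor descriptor : tableDesc.getColumnFamilies()) {
  descriptor.setMaxVersions(3); // mutates the live descriptor object
}
admin.modifyTable(tableName, tableDesc);

// New pattern (introduced by this commit): build immutable copies, then apply them.
TableDescriptor current = admin.listTableDescriptor(tableName);
TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(current);
for (ColumnFamilyDescriptor descriptor : current.getColumnFamilies()) {
  tableBuilder.modifyColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(descriptor)
      .setMaxVersions(3)
      .build());
}
admin.modifyTable(tableBuilder.build());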

IntegrationTestIngestWithMOB.java (View File)

@@ -25,6 +25,9 @@ import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.testclassification.IntegrationTests;
import org.apache.hadoop.hbase.util.Bytes;
@@ -113,20 +116,20 @@ public class IntegrationTestIngestWithMOB extends IntegrationTestIngest {
super.initTable();
TableName tableName = getTablename();
Admin admin = ConnectionFactory.createConnection().getAdmin();
HTableDescriptor tableDesc = admin.getTableDescriptor(tableName);
LOG.info("Disabling table " + getTablename());
admin.disableTable(tableName);
for (HColumnDescriptor columnDescriptor : tableDesc.getFamilies()) {
if(Arrays.equals(columnDescriptor.getName(), mobColumnFamily)) {
columnDescriptor.setMobEnabled(true);
columnDescriptor.setMobThreshold((long) threshold);
admin.modifyColumnFamily(tableName, columnDescriptor);
}
try (Connection connection = ConnectionFactory.createConnection();
Admin admin = connection.getAdmin()) {
HTableDescriptor tableDesc = admin.getTableDescriptor(tableName);
LOG.info("Disabling table " + getTablename());
admin.disableTable(tableName);
ColumnFamilyDescriptor mobColumn = tableDesc.getColumnFamily(mobColumnFamily);
ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(mobColumn)
.setMobEnabled(true)
.setMobThreshold((long) threshold)
.build();
admin.modifyColumnFamily(tableName, cfd);
LOG.info("Enabling table " + getTablename());
admin.enableTable(tableName);
}
LOG.info("Enabling table " + getTablename());
admin.enableTable(tableName);
admin.close();
}
@Override

Action.java (View File)

@@ -23,6 +23,8 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import org.apache.commons.lang.math.RandomUtils;
import org.apache.commons.logging.Log;
@@ -30,12 +32,18 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterStatus;
import org.apache.hadoop.hbase.HBaseCluster;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.ServerLoad;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -237,6 +245,45 @@ public class Action {
return cluster.getConf();
}
/**
* Apply a transform to all columns in a given table. If the table has no columns, or if the context is stopping, this does nothing.
* @param tableName the table to modify
* @param transform the modification to perform; it receives the column family name as a String and a ColumnFamilyDescriptorBuilder for that family
*/
protected void modifyAllTableColumns(TableName tableName, BiConsumer<String, ColumnFamilyDescriptorBuilder> transform) throws IOException {
HBaseTestingUtility util = this.context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
TableDescriptor tableDescriptor = admin.listTableDescriptor(tableName);
ColumnFamilyDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
if (columnDescriptors == null || columnDescriptors.length == 0) {
return;
}
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
for (ColumnFamilyDescriptor descriptor : columnDescriptors) {
ColumnFamilyDescriptorBuilder cfd = ColumnFamilyDescriptorBuilder.newBuilder(descriptor);
transform.accept(descriptor.getNameAsString(), cfd);
builder.modifyColumnFamily(cfd.build());
}
// Don't try the modify if we're stopping
if (this.context.isStopping()) {
return;
}
admin.modifyTable(builder.build());
}
/**
* Apply a transform to all columns in a given table. If the table has no columns, or if the context is stopping, this does nothing.
* @param tableName the table to modify
* @param transform the modification to perform on each column family descriptor builder
*/
protected void modifyAllTableColumns(TableName tableName, Consumer<ColumnFamilyDescriptorBuilder> transform) throws IOException {
modifyAllTableColumns(tableName, (name, cfd) -> transform.accept(cfd));
}
/**
* Context for Actions
*/
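The two modifyAllTableColumns overloads above are the hook the rewritten chaos actions below call into. A minimal sketch of a hypothetical subclass using both overloads (the action class, table name, and block sizes are invented for illustration; setBlocksize on the builder mirrors the old HColumnDescriptor setter):

package org.apache.hadoop.hbase.chaos.actions;

import org.apache.hadoop.hbase.TableName;

// Hypothetical example action, not part of this patch.
public class ChangeBlockSizeAction extends Action {
  private final TableName tableName;

  public ChangeBlockSizeAction(TableName tableName) {
    this.tableName = tableName;
  }

  @Override
  public void perform() throws Exception {
    // Consumer overload: apply the same change to every column family.
    modifyAllTableColumns(tableName, builder -> builder.setBlocksize(64 * 1024));

    // BiConsumer overload: the family name is also available, e.g. to skip some families.
    modifyAllTableColumns(tableName, (family, builder) -> {
      if (!family.startsWith("meta")) {
        builder.setBlocksize(128 * 1024);
      }
    });
  }
}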

ChangeBloomFilterAction.java (View File)

@@ -20,11 +20,7 @@ package org.apache.hadoop.hbase.chaos.actions;
import java.util.Random;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.regionserver.BloomType;
/**
@@ -46,37 +42,19 @@ public class ChangeBloomFilterAction extends Action {
@Override
public void perform() throws Exception {
Random random = new Random();
HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
Admin admin = util.getAdmin();
LOG.info("Performing action: Change bloom filter on all columns of table "
+ tableName);
HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
if (columnDescriptors == null || columnDescriptors.length == 0) {
return;
}
final Random random = new Random();
final BloomType[] bloomArray = BloomType.values();
final int bloomArraySize = bloomArray.length;
for (HColumnDescriptor descriptor : columnDescriptors) {
int bloomFilterIndex = random.nextInt(bloomArraySize);
LOG.debug("Performing action: About to set bloom filter type to "
+ bloomArray[bloomFilterIndex] + " on column "
+ descriptor.getNameAsString() + " of table " + tableName);
descriptor.setBloomFilterType(bloomArray[bloomFilterIndex]);
LOG.debug("Performing action: Just set bloom filter type to "
+ bloomArray[bloomFilterIndex] + " on column "
+ descriptor.getNameAsString() + " of table " + tableName);
}
LOG.info("Performing action: Change bloom filter on all columns of table " + tableName);
// Don't try the modify if we're stopping
if (context.isStopping()) {
return;
}
admin.modifyTable(tableName, tableDescriptor);
modifyAllTableColumns(tableName, (columnName, columnBuilder) -> {
BloomType bloomType = bloomArray[random.nextInt(bloomArraySize)];
LOG.debug("Performing action: About to set bloom filter type to "
+ bloomType + " on column " + columnName + " of table " + tableName);
columnBuilder.setBloomFilterType(bloomType);
});
LOG.debug("Performing action: Just set bloom filter types on table " + tableName);
}
}

ChangeCompressionAction.java (View File)

@@ -18,24 +18,19 @@
package org.apache.hadoop.hbase.chaos.actions;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.io.compress.Compressor;
import java.io.IOException;
import java.util.Random;
/**
* Action that changes the compression algorithm on a column family from a list of tables.
*/
public class ChangeCompressionAction extends Action {
private final TableName tableName;
private Admin admin;
private Random random;
private final Random random;
public ChangeCompressionAction(TableName tableName) {
this.tableName = tableName;
@@ -43,20 +38,7 @@ public class ChangeCompressionAction extends Action {
}
@Override
public void init(ActionContext context) throws IOException {
super.init(context);
this.admin = context.getHBaseIntegrationTestingUtility().getAdmin();
}
@Override
public void perform() throws Exception {
HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
if (columnDescriptors == null || columnDescriptors.length == 0) {
return;
}
public void perform() throws IOException {
// Possible compression algorithms. If an algorithm is not supported,
// modifyTable will fail, so there is no harm.
Algorithm[] possibleAlgos = Algorithm.values();
@@ -79,25 +61,19 @@ public class ChangeCompressionAction extends Action {
break;
} catch (Throwable t) {
LOG.info("Performing action: Changing compression algorithms to " + algo +
" is not supported, pick another one");
" is not supported, pick another one");
}
} while (true);
final Algorithm chosenAlgo = algo; // for use in lambda
LOG.debug("Performing action: Changing compression algorithms on "
+ tableName.getNameAsString() + " to " + algo);
for (HColumnDescriptor descriptor : columnDescriptors) {
+ tableName.getNameAsString() + " to " + chosenAlgo);
modifyAllTableColumns(tableName, columnFamilyDescriptorBuilder -> {
if (random.nextBoolean()) {
descriptor.setCompactionCompressionType(algo);
columnFamilyDescriptorBuilder.setCompactionCompressionType(chosenAlgo);
} else {
descriptor.setCompressionType(algo);
columnFamilyDescriptorBuilder.setCompressionType(chosenAlgo);
}
}
// Don't try the modify if we're stopping
if (context.isStopping()) {
return;
}
admin.modifyTable(tableName, tableDescriptor);
});
}
}
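The hunks above skip the middle of perform(): the visible break/catch pair belongs to a loop that keeps drawing random algorithms until one is actually usable on this cluster. A hedged sketch of that probing idea, assuming the action's random field and LOG are in scope (this is not necessarily the exact elided code; getCompressor/returnCompressor are the standard Compression.Algorithm calls):

// Draw random algorithms until one can hand out a working Compressor.
Algorithm[] possibleAlgos = Algorithm.values();
Algorithm algo;
do {
  algo = possibleAlgos[random.nextInt(possibleAlgos.length)];
  try {
    Compressor compressor = algo.getCompressor(); // throws if the codec is unavailable
    if (compressor != null) {
      algo.returnCompressor(compressor); // hand it back so native resources are not leaked
    }
    break;
  } catch (Throwable t) {
    LOG.info("Performing action: Changing compression algorithms to " + algo
        + " is not supported, pick another one");
  }
} while (true);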

ChangeEncodingAction.java (View File)

@@ -18,23 +18,18 @@
package org.apache.hadoop.hbase.chaos.actions;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
/**
* Action that changes the encoding on a column family from a list of tables.
*/
public class ChangeEncodingAction extends Action {
private final TableName tableName;
private Admin admin;
private Random random;
private final Random random;
public ChangeEncodingAction(TableName tableName) {
this.tableName = tableName;
@@ -42,34 +37,16 @@ public class ChangeEncodingAction extends Action {
}
@Override
public void init(ActionContext context) throws IOException {
super.init(context);
this.admin = context.getHBaseIntegrationTestingUtility().getAdmin();
}
@Override
public void perform() throws Exception {
HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
if (columnDescriptors == null || columnDescriptors.length == 0) {
return;
}
public void perform() throws IOException {
LOG.debug("Performing action: Changing encodings on " + tableName);
// possible DataBlockEncoding id's
int[] possibleIds = {0, 2, 3, 4, 6};
for (HColumnDescriptor descriptor : columnDescriptors) {
short id = (short) possibleIds[random.nextInt(possibleIds.length)];
descriptor.setDataBlockEncoding(DataBlockEncoding.getEncodingById(id));
LOG.debug("Set encoding of column family " + descriptor.getNameAsString()
+ " to: " + descriptor.getDataBlockEncoding());
}
final int[] possibleIds = {0, 2, 3, 4, 6};
// Don't try the modify if we're stopping
if (context.isStopping()) {
return;
}
admin.modifyTable(tableName, tableDescriptor);
modifyAllTableColumns(tableName, (columnName, columnBuilder) -> {
short id = (short) possibleIds[random.nextInt(possibleIds.length)];
DataBlockEncoding encoding = DataBlockEncoding.getEncodingById(id);
columnBuilder.setDataBlockEncoding(encoding);
LOG.debug("Set encoding of column family " + columnName + " to: " + encoding);
});
}
}
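The possibleIds array holds raw DataBlockEncoding ids rather than enum constants. A small standalone sketch (assuming DataBlockEncoding is imported) that resolves them the same way perform() does, with the expected mapping noted as a comment; worth verifying against the DataBlockEncoding enum for the HBase version in use:

// Resolve the raw ids used above into DataBlockEncoding values.
short[] possibleIds = {0, 2, 3, 4, 6};
for (short id : possibleIds) {
  DataBlockEncoding encoding = DataBlockEncoding.getEncodingById(id);
  // Expected for this HBase era: 0=NONE, 2=PREFIX, 3=DIFF, 4=FAST_DIFF, 6=PREFIX_TREE.
  System.out.println(id + " -> " + encoding);
}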

ChangeVersionsAction.java (View File)

@@ -21,10 +21,7 @@ package org.apache.hadoop.hbase.chaos.actions;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
/**
* Action that changes the number of versions on a column family from a list of tables.
@@ -34,7 +31,6 @@ import org.apache.hadoop.hbase.client.Admin;
public class ChangeVersionsAction extends Action {
private final TableName tableName;
private Admin admin;
private Random random;
public ChangeVersionsAction(TableName tableName) {
@@ -43,29 +39,13 @@ public class ChangeVersionsAction extends Action {
}
@Override
public void init(ActionContext context) throws IOException {
super.init(context);
this.admin = context.getHBaseIntegrationTestingUtility().getAdmin();
}
public void perform() throws IOException {
final int versions = random.nextInt(3) + 1;
@Override
public void perform() throws Exception {
HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
if ( columnDescriptors == null || columnDescriptors.length == 0) {
return;
}
int versions = random.nextInt(3) + 1;
for(HColumnDescriptor descriptor:columnDescriptors) {
descriptor.setVersions(versions, versions);
}
// Don't try the modify if we're stopping
if (context.isStopping()) {
return;
}
LOG.debug("Performing action: Changing versions on " + tableName.getNameAsString());
admin.modifyTable(tableName, tableDescriptor);
LOG.debug("Performing action: Changing versions on " + tableName + " to " + versions);
modifyAllTableColumns(tableName, columnBuilder -> {
columnBuilder.setMinVersions(versions).setMaxVersions(versions);
});
LOG.debug("Performing action: Just changed versions on " + tableName);
}
}

RemoveColumnAction.java (View File)

@@ -22,10 +22,11 @@ import java.io.IOException;
import java.util.Random;
import java.util.Set;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -51,8 +52,8 @@ public class RemoveColumnAction extends Action {
@Override
public void perform() throws Exception {
HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
TableDescriptor tableDescriptor = admin.listTableDescriptor(tableName);
ColumnFamilyDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
if (columnDescriptors.length <= (protectedColumns == null ? 1 : protectedColumns.size())) {
return;
@@ -66,12 +67,14 @@
byte[] colDescName = columnDescriptors[index].getName();
LOG.debug("Performing action: Removing " + Bytes.toString(colDescName)+ " from "
+ tableName.getNameAsString());
tableDescriptor.removeFamily(colDescName);
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
builder.removeColumnFamily(colDescName);
// Don't try the modify if we're stopping
if (context.isStopping()) {
return;
}
admin.modifyTable(tableName, tableDescriptor);
admin.modifyTable(builder.build());
}
}
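The hunks above show only the endpoints of perform(); the choice of which family to drop sits between them. A rough end-to-end sketch of the flow the rewritten action follows, assuming the action's admin, tableName, protectedColumns, and random fields are in scope (the index-picking loop is illustrative, while the descriptor and builder calls match the patch):

// Sketch: drop one random, non-protected column family, keeping at least one family.
TableDescriptor tableDescriptor = admin.listTableDescriptor(tableName);
ColumnFamilyDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
int protectedCount = (protectedColumns == null) ? 1 : protectedColumns.size();
if (columnDescriptors.length > protectedCount) {
  int index = random.nextInt(columnDescriptors.length);
  while (protectedColumns != null
      && protectedColumns.contains(columnDescriptors[index].getNameAsString())) {
    index = random.nextInt(columnDescriptors.length);
  }
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
  builder.removeColumnFamily(columnDescriptors[index].getName());
  admin.modifyTable(builder.build());
}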