HBASE-18419 Update IntegrationTestIngestWithMOB and Actions to use ColumnFamily builders for modification
Signed-off-by: Chia-Ping Tsai <chia7712@gmail.com>
parent 254b78c116
commit 79b61a0546
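The patch replaces in-place mutation of HTableDescriptor/HColumnDescriptor with the immutable ColumnFamilyDescriptorBuilder API. Before the per-file diffs, here is a minimal sketch of the pattern the change adopts, written against the same HBase client calls the patch itself uses; the table name, family name, and MOB threshold are illustrative, not taken from the patch:

    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.util.Bytes;

    public class MobColumnFamilyExample {
      public static void main(String[] args) throws Exception {
        TableName tableName = TableName.valueOf("test_table"); // illustrative name
        byte[] mobColumnFamily = Bytes.toBytes("mob");          // illustrative name
        try (Connection connection = ConnectionFactory.createConnection();
            Admin admin = connection.getAdmin()) {
          HTableDescriptor tableDesc = admin.getTableDescriptor(tableName);
          admin.disableTable(tableName);
          // Copy the existing family into a builder, apply the change, rebuild an immutable descriptor.
          ColumnFamilyDescriptor mobColumn = tableDesc.getColumnFamily(mobColumnFamily);
          ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(mobColumn)
              .setMobEnabled(true)
              .setMobThreshold(1024L) // illustrative threshold, in bytes
              .build();
          admin.modifyColumnFamily(tableName, cfd);
          admin.enableTable(tableName);
        }
      }
    }

The key difference from the old code is that descriptors are copied into a builder, modified, and rebuilt rather than mutated and written back.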
IntegrationTestIngestWithMOB.java

@@ -25,6 +25,9 @@ import java.util.List;
 import org.apache.commons.cli.CommandLine;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.testclassification.IntegrationTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -113,20 +116,20 @@ public class IntegrationTestIngestWithMOB extends IntegrationTestIngest {
     super.initTable();
 
     TableName tableName = getTablename();
-    Admin admin = ConnectionFactory.createConnection().getAdmin();
-    HTableDescriptor tableDesc = admin.getTableDescriptor(tableName);
-    LOG.info("Disabling table " + getTablename());
-    admin.disableTable(tableName);
-    for (HColumnDescriptor columnDescriptor : tableDesc.getFamilies()) {
-      if(Arrays.equals(columnDescriptor.getName(), mobColumnFamily)) {
-        columnDescriptor.setMobEnabled(true);
-        columnDescriptor.setMobThreshold((long) threshold);
-        admin.modifyColumnFamily(tableName, columnDescriptor);
-      }
-    }
-    LOG.info("Enabling table " + getTablename());
-    admin.enableTable(tableName);
-    admin.close();
+    try (Connection connection = ConnectionFactory.createConnection();
+        Admin admin = connection.getAdmin()) {
+      HTableDescriptor tableDesc = admin.getTableDescriptor(tableName);
+      LOG.info("Disabling table " + getTablename());
+      admin.disableTable(tableName);
+      ColumnFamilyDescriptor mobColumn = tableDesc.getColumnFamily(mobColumnFamily);
+      ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(mobColumn)
+          .setMobEnabled(true)
+          .setMobThreshold((long) threshold)
+          .build();
+      admin.modifyColumnFamily(tableName, cfd);
+      LOG.info("Enabling table " + getTablename());
+      admin.enableTable(tableName);
+    }
   }
 
   @Override
Action.java

@@ -23,6 +23,8 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
 
 import org.apache.commons.lang.math.RandomUtils;
 import org.apache.commons.logging.Log;
@@ -30,12 +32,18 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.HBaseCluster;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
 import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.chaos.monkies.PolicyBasedChaosMonkey;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -237,6 +245,45 @@ public class Action {
     return cluster.getConf();
   }
 
+  /**
+   * Apply a transform to all columns in a given table. If there are no columns in a table or if the context is stopping does nothing.
+   * @param tableName the table to modify
+   * @param transform the modification to perform. Callers will have the column name as a string and a column family builder available to them
+   */
+  protected void modifyAllTableColumns(TableName tableName, BiConsumer<String, ColumnFamilyDescriptorBuilder> transform) throws IOException {
+    HBaseTestingUtility util = this.context.getHBaseIntegrationTestingUtility();
+    Admin admin = util.getAdmin();
+
+    TableDescriptor tableDescriptor = admin.listTableDescriptor(tableName);
+    ColumnFamilyDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
+
+    if (columnDescriptors == null || columnDescriptors.length == 0) {
+      return;
+    }
+
+    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
+    for (ColumnFamilyDescriptor descriptor : columnDescriptors) {
+      ColumnFamilyDescriptorBuilder cfd = ColumnFamilyDescriptorBuilder.newBuilder(descriptor);
+      transform.accept(descriptor.getNameAsString(), cfd);
+      builder.modifyColumnFamily(cfd.build());
+    }
+
+    // Don't try the modify if we're stopping
+    if (this.context.isStopping()) {
+      return;
+    }
+    admin.modifyTable(builder.build());
+  }
+
+  /**
+   * Apply a transform to all columns in a given table. If there are no columns in a table or if the context is stopping does nothing.
+   * @param tableName the table to modify
+   * @param transform the modification to perform on each column family descriptor builder
+   */
+  protected void modifyAllTableColumns(TableName tableName, Consumer<ColumnFamilyDescriptorBuilder> transform) throws IOException {
+    modifyAllTableColumns(tableName, (name, cfd) -> transform.accept(cfd));
+  }
+
   /**
    * Context for Action's
    */
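The remaining diffs rewrite the individual chaos actions on top of these two helpers. As a rough usage sketch, a subclass only supplies the per-family transform; the subclass below is hypothetical and the max-versions value is illustrative, not part of the patch:

    import java.io.IOException;

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.chaos.actions.Action;

    // Hypothetical action: set every column family of one table to three versions.
    public class SetMaxVersionsAction extends Action {
      private final TableName tableName;

      public SetMaxVersionsAction(TableName tableName) {
        this.tableName = tableName;
      }

      @Override
      public void perform() throws IOException {
        // The helper copies each family into a builder, hands it to the lambda,
        // rebuilds the table descriptor, and applies it unless the context is stopping.
        modifyAllTableColumns(tableName, columnBuilder -> columnBuilder.setMaxVersions(3));
      }
    }

ChangeBloomFilterAction, ChangeCompressionAction, ChangeEncodingAction, and ChangeVersionsAction below follow this shape, using either the Consumer or the BiConsumer overload.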
ChangeBloomFilterAction.java

@@ -20,11 +20,7 @@ package org.apache.hadoop.hbase.chaos.actions;
 
 import java.util.Random;
 
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 
 /**
@@ -46,37 +42,19 @@ public class ChangeBloomFilterAction extends Action {
 
   @Override
   public void perform() throws Exception {
-    Random random = new Random();
-    HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
-    Admin admin = util.getAdmin();
-
-    LOG.info("Performing action: Change bloom filter on all columns of table "
-        + tableName);
-    HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
-    HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
-
-    if (columnDescriptors == null || columnDescriptors.length == 0) {
-      return;
-    }
-
+    final Random random = new Random();
     final BloomType[] bloomArray = BloomType.values();
     final int bloomArraySize = bloomArray.length;
 
-    for (HColumnDescriptor descriptor : columnDescriptors) {
-      int bloomFilterIndex = random.nextInt(bloomArraySize);
-      LOG.debug("Performing action: About to set bloom filter type to "
-          + bloomArray[bloomFilterIndex] + " on column "
-          + descriptor.getNameAsString() + " of table " + tableName);
-      descriptor.setBloomFilterType(bloomArray[bloomFilterIndex]);
-      LOG.debug("Performing action: Just set bloom filter type to "
-          + bloomArray[bloomFilterIndex] + " on column "
-          + descriptor.getNameAsString() + " of table " + tableName);
-    }
-
-    // Don't try the modify if we're stopping
-    if (context.isStopping()) {
-      return;
-    }
-    admin.modifyTable(tableName, tableDescriptor);
+    LOG.info("Performing action: Change bloom filter on all columns of table " + tableName);
+
+    modifyAllTableColumns(tableName, (columnName, columnBuilder) -> {
+      BloomType bloomType = bloomArray[random.nextInt(bloomArraySize)];
+      LOG.debug("Performing action: About to set bloom filter type to "
+          + bloomType + " on column " + columnName + " of table " + tableName);
+      columnBuilder.setBloomFilterType(bloomType);
+    });
+
+    LOG.debug("Performing action: Just set bloom filter types on table " + tableName);
   }
 }
ChangeCompressionAction.java

@@ -18,24 +18,19 @@
 
 package org.apache.hadoop.hbase.chaos.actions;
 
-import java.io.IOException;
-import java.util.Random;
-
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
 import org.apache.hadoop.io.compress.Compressor;
 
+import java.io.IOException;
+import java.util.Random;
+
 /**
  * Action that changes the compression algorithm on a column family from a list of tables.
  */
 public class ChangeCompressionAction extends Action {
   private final TableName tableName;
-
-  private Admin admin;
-  private Random random;
+  private final Random random;
 
   public ChangeCompressionAction(TableName tableName) {
     this.tableName = tableName;
@@ -43,20 +38,7 @@ public class ChangeCompressionAction extends Action {
   }
 
   @Override
-  public void init(ActionContext context) throws IOException {
-    super.init(context);
-    this.admin = context.getHBaseIntegrationTestingUtility().getAdmin();
-  }
-
-  @Override
-  public void perform() throws Exception {
-    HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
-    HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
-
-    if (columnDescriptors == null || columnDescriptors.length == 0) {
-      return;
-    }
-
+  public void perform() throws IOException {
     // Possible compression algorithms. If an algorithm is not supported,
     // modifyTable will fail, so there is no harm.
     Algorithm[] possibleAlgos = Algorithm.values();
@@ -79,25 +61,19 @@
         break;
       } catch (Throwable t) {
         LOG.info("Performing action: Changing compression algorithms to " + algo +
             " is not supported, pick another one");
       }
     } while (true);
 
+    final Algorithm chosenAlgo = algo; // for use in lambda
     LOG.debug("Performing action: Changing compression algorithms on "
-        + tableName.getNameAsString() + " to " + algo);
-    for (HColumnDescriptor descriptor : columnDescriptors) {
+        + tableName.getNameAsString() + " to " + chosenAlgo);
+    modifyAllTableColumns(tableName, columnFamilyDescriptorBuilder -> {
       if (random.nextBoolean()) {
-        descriptor.setCompactionCompressionType(algo);
+        columnFamilyDescriptorBuilder.setCompactionCompressionType(chosenAlgo);
       } else {
-        descriptor.setCompressionType(algo);
+        columnFamilyDescriptorBuilder.setCompressionType(chosenAlgo);
       }
-    }
-
-    // Don't try the modify if we're stopping
-    if (context.isStopping()) {
-      return;
-    }
-
-    admin.modifyTable(tableName, tableDescriptor);
+    });
   }
 }
ChangeEncodingAction.java

@@ -18,23 +18,18 @@
 
 package org.apache.hadoop.hbase.chaos.actions;
 
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+
 import java.io.IOException;
 import java.util.Random;
 
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-
 /**
  * Action that changes the encoding on a column family from a list of tables.
  */
 public class ChangeEncodingAction extends Action {
   private final TableName tableName;
-
-  private Admin admin;
-  private Random random;
+  private final Random random;
 
   public ChangeEncodingAction(TableName tableName) {
     this.tableName = tableName;
@@ -42,34 +37,16 @@ public class ChangeEncodingAction extends Action {
   }
 
   @Override
-  public void init(ActionContext context) throws IOException {
-    super.init(context);
-    this.admin = context.getHBaseIntegrationTestingUtility().getAdmin();
-  }
-
-  @Override
-  public void perform() throws Exception {
-    HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
-    HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
-
-    if (columnDescriptors == null || columnDescriptors.length == 0) {
-      return;
-    }
-
+  public void perform() throws IOException {
     LOG.debug("Performing action: Changing encodings on " + tableName);
     // possible DataBlockEncoding id's
-    int[] possibleIds = {0, 2, 3, 4, 6};
-    for (HColumnDescriptor descriptor : columnDescriptors) {
-      short id = (short) possibleIds[random.nextInt(possibleIds.length)];
-      descriptor.setDataBlockEncoding(DataBlockEncoding.getEncodingById(id));
-      LOG.debug("Set encoding of column family " + descriptor.getNameAsString()
-          + " to: " + descriptor.getDataBlockEncoding());
-    }
+    final int[] possibleIds = {0, 2, 3, 4, 6};
 
-    // Don't try the modify if we're stopping
-    if (context.isStopping()) {
-      return;
-    }
-    admin.modifyTable(tableName, tableDescriptor);
+    modifyAllTableColumns(tableName, (columnName, columnBuilder) -> {
+      short id = (short) possibleIds[random.nextInt(possibleIds.length)];
+      DataBlockEncoding encoding = DataBlockEncoding.getEncodingById(id);
+      columnBuilder.setDataBlockEncoding(encoding);
+      LOG.debug("Set encoding of column family " + columnName + " to: " + encoding);
+    });
   }
 }
ChangeVersionsAction.java

@@ -21,10 +21,7 @@ package org.apache.hadoop.hbase.chaos.actions;
 import java.io.IOException;
 import java.util.Random;
 
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
 
 /**
  * Action that changes the number of versions on a column family from a list of tables.
@@ -34,7 +31,6 @@ import org.apache.hadoop.hbase.client.Admin;
 public class ChangeVersionsAction extends Action {
   private final TableName tableName;
 
-  private Admin admin;
   private Random random;
 
   public ChangeVersionsAction(TableName tableName) {
@@ -43,29 +39,13 @@ public class ChangeVersionsAction extends Action {
   }
 
   @Override
-  public void init(ActionContext context) throws IOException {
-    super.init(context);
-    this.admin = context.getHBaseIntegrationTestingUtility().getAdmin();
-  }
-
-  @Override
-  public void perform() throws Exception {
-    HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
-    HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
-
-    if ( columnDescriptors == null || columnDescriptors.length == 0) {
-      return;
-    }
-
-    int versions = random.nextInt(3) + 1;
-    for(HColumnDescriptor descriptor:columnDescriptors) {
-      descriptor.setVersions(versions, versions);
-    }
-    // Don't try the modify if we're stopping
-    if (context.isStopping()) {
-      return;
-    }
-    LOG.debug("Performing action: Changing versions on " + tableName.getNameAsString());
-    admin.modifyTable(tableName, tableDescriptor);
+  public void perform() throws IOException {
+    final int versions = random.nextInt(3) + 1;
+
+    LOG.debug("Performing action: Changing versions on " + tableName + " to " + versions);
+    modifyAllTableColumns(tableName, columnBuilder -> {
+      columnBuilder.setMinVersions(versions).setMaxVersions(versions);
+    });
+    LOG.debug("Performing action: Just changed versions on " + tableName);
   }
 }
RemoveColumnAction.java

@@ -22,10 +22,11 @@ import java.io.IOException;
 import java.util.Random;
 import java.util.Set;
 
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -51,8 +52,8 @@ public class RemoveColumnAction extends Action {
 
   @Override
   public void perform() throws Exception {
-    HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);
-    HColumnDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
+    TableDescriptor tableDescriptor = admin.listTableDescriptor(tableName);
+    ColumnFamilyDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
 
     if (columnDescriptors.length <= (protectedColumns == null ? 1 : protectedColumns.size())) {
       return;
@@ -66,12 +67,14 @@
     byte[] colDescName = columnDescriptors[index].getName();
     LOG.debug("Performing action: Removing " + Bytes.toString(colDescName)+ " from "
         + tableName.getNameAsString());
-    tableDescriptor.removeFamily(colDescName);
+
+    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
+    builder.removeColumnFamily(colDescName);
 
     // Don't try the modify if we're stopping
     if (context.isStopping()) {
       return;
     }
-    admin.modifyTable(tableName, tableDescriptor);
+    admin.modifyTable(builder.build());
   }
 }