HBASE-24627 Normalize one table at a time

Introduce an additional method to our Admin interface that allows an
operator to selectively run the normalizer. The IPC protocol supports
general table name selection via a compound filter.

Signed-off-by: Sean Busbey <busbey@apache.org>
Signed-off-by: Viraj Jasani <vjasani@apache.org>
Authored by Nick Dimiduk, 2020-08-06 19:07:46 -07:00; committed by Nick Dimiduk
parent 00aa3bc9fc
commit acfa08cd87
16 changed files with 379 additions and 60 deletions
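
For context, a minimal sketch of how an operator might drive the new filtered API from client
code. The table name 'my_table', the class name, and the connection setup are illustrative
assumptions, not part of this commit:

    import java.util.Collections;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.ConnectionFactory;
    import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;

    public final class NormalizeOneTableExample {
      public static void main(String[] args) throws Exception {
        // Connect using whatever hbase-site.xml is on the classpath.
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
          // Restrict the normalizer run to a single table.
          final NormalizeTableFilterParams ntfp = new NormalizeTableFilterParams.Builder()
            .tableNames(Collections.singletonList(TableName.valueOf("my_table")))
            .build();
          // Non-blocking: true means the request was submitted; check master logs
          // for which regions were actually split/merged.
          boolean submitted = admin.normalize(ntfp);
          System.out.println("normalize request submitted: " + submitted);
        }
      }
    }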

Admin.java

@@ -1292,10 +1292,24 @@ public interface Admin extends Abortable, Closeable {
    * the request was submitted successfully. We need to check logs for the details of which regions
    * were split/merged.
    *
-   * @return <code>true</code> if region normalizer ran, <code>false</code> otherwise.
+   * @return {@code true} if region normalizer ran, {@code false} otherwise.
    * @throws IOException if a remote or network exception occurs
    */
-  boolean normalize() throws IOException;
+  default boolean normalize() throws IOException {
+    return normalize(new NormalizeTableFilterParams.Builder().build());
+  }
+
+  /**
+   * Invoke region normalizer. Can NOT run for various reasons. Check logs.
+   * This is a non-blocking invocation to region normalizer. If return value is true, it means
+   * the request was submitted successfully. We need to check logs for the details of which regions
+   * were split/merged.
+   *
+   * @param ntfp limit to tables matching the specified filter.
+   * @return {@code true} if region normalizer ran, {@code false} otherwise.
+   * @throws IOException if a remote or network exception occurs
+   */
+  boolean normalize(NormalizeTableFilterParams ntfp) throws IOException;

   /**
    * Query the current state of the region normalizer.

AsyncAdmin.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information

@@ -1253,7 +1253,17 @@ public interface AsyncAdmin {
    * @return true if region normalizer ran, false otherwise. The return value will be wrapped by a
    *         {@link CompletableFuture}
    */
-  CompletableFuture<Boolean> normalize();
+  default CompletableFuture<Boolean> normalize() {
+    return normalize(new NormalizeTableFilterParams.Builder().build());
+  }
+
+  /**
+   * Invoke region normalizer. Can NOT run for various reasons. Check logs.
+   * @param ntfp limit to tables matching the specified filter.
+   * @return true if region normalizer ran, false otherwise. The return value will be wrapped by a
+   *         {@link CompletableFuture}
+   */
+  CompletableFuture<Boolean> normalize(NormalizeTableFilterParams ntfp);

   /**
    * Turn the cleaner chore on/off.

AsyncHBaseAdmin.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information

@@ -704,8 +704,8 @@ class AsyncHBaseAdmin implements AsyncAdmin {
   }

   @Override
-  public CompletableFuture<Boolean> normalize() {
-    return wrap(rawAdmin.normalize());
+  public CompletableFuture<Boolean> normalize(NormalizeTableFilterParams ntfp) {
+    return wrap(rawAdmin.normalize(ntfp));
   }

   @Override

HBaseAdmin.java

@@ -1557,18 +1557,13 @@ public class HBaseAdmin implements Admin {
     }
   }

-  /**
-   * Invoke region normalizer. Can NOT run for various reasons. Check logs.
-   *
-   * @return True if region normalizer ran, false otherwise.
-   */
   @Override
-  public boolean normalize() throws IOException {
+  public boolean normalize(NormalizeTableFilterParams ntfp) throws IOException {
     return executeCallable(new MasterCallable<Boolean>(getConnection(), getRpcControllerFactory()) {
       @Override
       protected Boolean rpcCall() throws Exception {
         return master.normalize(getRpcController(),
-          RequestConverter.buildNormalizeRequest()).getNormalizerRan();
+          RequestConverter.buildNormalizeRequest(ntfp)).getNormalizerRan();
       }
     });
   }

NormalizeTableFilterParams.java (new file)

@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.util.List;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * A collection of criteria used for table selection. The logic of table selection is as follows:
+ * <ul>
+ *   <li>
+ *     When no parameter values are provided, an unfiltered list of all user tables is returned.
+ *   </li>
+ *   <li>
+ *     When a list of {@link TableName TableNames} are provided, the filter starts with any of
+ *     these tables that exist.
+ *   </li>
+ *   <li>
+ *     When a {@code namespace} name is provided, the filter starts with all the tables present in
+ *     that namespace.
+ *   </li>
+ *   <li>
+ *     If both a list of {@link TableName TableNames} and a {@code namespace} name are provided,
+ *     the {@link TableName} list is honored and the {@code namespace} name is ignored.
+ *   </li>
+ *   <li>
+ *     If a {@code regex} is provided, this subset of {@link TableName TableNames} is further
+ *     reduced to those that match the provided regular expression.
+ *   </li>
+ * </ul>
+ */
+@InterfaceAudience.Public
+public final class NormalizeTableFilterParams {
+  private final List<TableName> tableNames;
+  private final String regex;
+  private final String namespace;
+
+  private NormalizeTableFilterParams(final List<TableName> tableNames, final String regex,
+    final String namespace) {
+    this.tableNames = tableNames;
+    this.regex = regex;
+    this.namespace = namespace;
+  }
+
+  public List<TableName> getTableNames() {
+    return tableNames;
+  }
+
+  public String getRegex() {
+    return regex;
+  }
+
+  public String getNamespace() {
+    return namespace;
+  }
+
+  /**
+   * Used to instantiate an instance of {@link NormalizeTableFilterParams}.
+   */
+  public static class Builder {
+    private List<TableName> tableNames;
+    private String regex;
+    private String namespace;
+
+    public Builder tableFilterParams(final NormalizeTableFilterParams ntfp) {
+      this.tableNames = ntfp.getTableNames();
+      this.regex = ntfp.getRegex();
+      this.namespace = ntfp.getNamespace();
+      return this;
+    }
+
+    public Builder tableNames(final List<TableName> tableNames) {
+      this.tableNames = tableNames;
+      return this;
+    }
+
+    public Builder regex(final String regex) {
+      this.regex = regex;
+      return this;
+    }
+
+    public Builder namespace(final String namespace) {
+      this.namespace = namespace;
+      return this;
+    }
+
+    public NormalizeTableFilterParams build() {
+      return new NormalizeTableFilterParams(tableNames, regex, namespace);
+    }
+  }
+}
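
A quick, hedged illustration of the precedence rule documented in the class javadoc above
(names 'ns1', 'ns2', and 't1' are placeholders; imports as in the class itself):

    // When both tableNames and namespace are set, the name list wins and the
    // namespace is ignored; a regex then further narrows the surviving names.
    NormalizeTableFilterParams ntfp = new NormalizeTableFilterParams.Builder()
      .tableNames(java.util.Collections.singletonList(TableName.valueOf("ns1", "t1")))
      .namespace("ns2") // ignored because tableNames is provided
      .regex(".*1")     // applied after the name list is resolved
      .build();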

RawAsyncHBaseAdmin.java

@@ -3257,14 +3257,18 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
   }

   @Override
-  public CompletableFuture<Boolean> normalize() {
+  public CompletableFuture<Boolean> normalize(NormalizeTableFilterParams ntfp) {
+    return normalize(RequestConverter.buildNormalizeRequest(ntfp));
+  }
+
+  private CompletableFuture<Boolean> normalize(NormalizeRequest request) {
     return this
       .<Boolean> newMasterCaller()
       .action(
-        (controller, stub) -> this.<NormalizeRequest, NormalizeResponse, Boolean> call(
-          controller, stub, RequestConverter.buildNormalizeRequest(),
-          (s, c, req, done) -> s.normalize(c, req, done), (resp) -> resp.getNormalizerRan()))
+        (controller, stub) -> this.call(
+          controller, stub, request, MasterService.Interface::normalize,
+          NormalizeResponse::getNormalizerRan))
       .call();
   }

   @Override

ProtobufUtil.java

@@ -2251,6 +2251,13 @@ public final class ProtobufUtil {
       .setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier())).build();
   }

+  public static List<HBaseProtos.TableName> toProtoTableNameList(List<TableName> tableNameList) {
+    if (tableNameList == null) {
+      return new ArrayList<>();
+    }
+    return tableNameList.stream().map(ProtobufUtil::toProtoTableName).collect(Collectors.toList());
+  }
+
   public static List<TableName> toTableNameList(List<HBaseProtos.TableName> tableNamesList) {
     if (tableNamesList == null) {
       return new ArrayList<>();

RequestConverter.java

@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information

@@ -48,6 +48,7 @@ import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.LogQueryFilter;
 import org.apache.hadoop.hbase.client.MasterSwitchType;
 import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionCoprocessorServiceExec;
 import org.apache.hadoop.hbase.client.RegionInfo;

@@ -1724,8 +1725,18 @@ public final class RequestConverter {
    *
    * @return a NormalizeRequest
    */
-  public static NormalizeRequest buildNormalizeRequest() {
-    return NormalizeRequest.newBuilder().build();
+  public static NormalizeRequest buildNormalizeRequest(NormalizeTableFilterParams ntfp) {
+    final NormalizeRequest.Builder builder = NormalizeRequest.newBuilder();
+    if (ntfp.getTableNames() != null) {
+      builder.addAllTableNames(ProtobufUtil.toProtoTableNameList(ntfp.getTableNames()));
+    }
+    if (ntfp.getRegex() != null) {
+      builder.setRegex(ntfp.getRegex());
+    }
+    if (ntfp.getNamespace() != null) {
+      builder.setNamespace(ntfp.getNamespace());
+    }
+    return builder.build();
   }

   /**

Master.proto

@@ -353,6 +353,9 @@ message IsSplitOrMergeEnabledResponse {
 }

 message NormalizeRequest {
+  repeated TableName table_names = 1;
+  optional string regex = 2;
+  optional string namespace = 3;
 }

 message NormalizeResponse {
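
For reference, a hedged sketch of building the extended request directly against the shaded
generated classes; this is not part of the commit, and the helper shown is the one the converter
above uses:

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest;

    // Optional fields left unset are omitted from the wire message; the server
    // side (MasterRpcServices, below) maps an absent regex/namespace back to
    // null via hasRegex()/hasNamespace().
    NormalizeRequest request = NormalizeRequest.newBuilder()
      .addTableNames(ProtobufUtil.toProtoTableName(TableName.valueOf("ns1", "t1")))
      .setRegex("t.*")
      .build();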

HMaster.java

@@ -21,7 +21,6 @@ import static org.apache.hadoop.hbase.HConstants.DEFAULT_HBASE_SPLIT_COORDINATED
 import static org.apache.hadoop.hbase.HConstants.HBASE_MASTER_LOGCLEANER_PLUGINS;
 import static org.apache.hadoop.hbase.HConstants.HBASE_SPLIT_WAL_COORDINATED_BY_ZK;
 import static org.apache.hadoop.hbase.util.DNS.MASTER_HOSTNAME_KEY;
-
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Service;
 import java.io.IOException;
@@ -40,6 +39,7 @@ import java.util.Comparator;
 import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -82,9 +82,9 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotDisabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.UnknownRegionException;
-import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.MasterSwitchType;
+import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.RegionStatesCount;
@@ -214,6 +214,7 @@ import org.apache.hadoop.hbase.zookeeper.ZKClusterId;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
+import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 import org.eclipse.jetty.server.Server;
@@ -222,12 +223,10 @@ import org.eclipse.jetty.servlet.ServletHolder;
 import org.eclipse.jetty.webapp.WebAppContext;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
 import org.apache.hbase.thirdparty.org.apache.commons.collections4.CollectionUtils;
-
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
@@ -1882,14 +1881,18 @@ public class HMaster extends HRegionServer implements MasterServices {
     return this.normalizer;
   }

+  public boolean normalizeRegions() throws IOException {
+    return normalizeRegions(new NormalizeTableFilterParams.Builder().build());
+  }
+
   /**
-   * Perform normalization of cluster (invoked by {@link RegionNormalizerChore}).
+   * Perform normalization of cluster.
    *
    * @return true if an existing normalization was already in progress, or if a new normalization
    *   was performed successfully; false otherwise (specifically, if HMaster finished initializing
    *   or normalization is globally disabled).
    */
-  public boolean normalizeRegions() throws IOException {
+  public boolean normalizeRegions(final NormalizeTableFilterParams ntfp) throws IOException {
     final long startTime = EnvironmentEdgeManager.currentTime();
     if (regionNormalizerTracker == null || !regionNormalizerTracker.isNormalizerOn()) {
       LOG.debug("Region normalization is disabled, don't run region normalizer.");
@@ -1910,12 +1913,19 @@ public class HMaster extends HRegionServer implements MasterServices {
     int affectedTables = 0;
     try {
-      final List<TableName> allEnabledTables =
-        new ArrayList<>(tableStateManager.getTablesInStates(TableState.State.ENABLED));
-      Collections.shuffle(allEnabledTables);
+      final Set<TableName> matchingTables = getTableDescriptors(new LinkedList<>(),
+        ntfp.getNamespace(), ntfp.getRegex(), ntfp.getTableNames(), false)
+        .stream()
+        .map(TableDescriptor::getTableName)
+        .collect(Collectors.toSet());
+      final Set<TableName> allEnabledTables =
+        tableStateManager.getTablesInStates(TableState.State.ENABLED);
+      final List<TableName> targetTables =
+        new ArrayList<>(Sets.intersection(matchingTables, allEnabledTables));
+      Collections.shuffle(targetTables);

       final List<Long> submittedPlanProcIds = new ArrayList<>();
-      for (TableName table : allEnabledTables) {
+      for (TableName table : targetTables) {
         if (table.isSystemTable()) {
           continue;
         }
@@ -3370,9 +3380,9 @@ public class HMaster extends HRegionServer implements MasterServices {
   }

   /**
-   * @return list of table descriptors after filtering by regex and whether to include system
-   *   tables, etc.
-   * @throws IOException
+   * Return a list of table descriptors after applying any provided filter parameters. Note
+   * that the user-facing description of this filter logic is presented on the class-level javadoc
+   * of {@link NormalizeTableFilterParams}.
    */
   private List<TableDescriptor> getTableDescriptors(final List<TableDescriptor> htds,
     final String namespace, final String regex, final List<TableName> tableNameList,

MasterRpcServices.java

@@ -49,6 +49,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownRegionException;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.MasterSwitchType;
+import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionInfoBuilder;
@@ -1932,7 +1933,14 @@ public class MasterRpcServices extends RSRpcServices implements
       NormalizeRequest request) throws ServiceException {
     rpcPreCheck("normalize");
     try {
-      return NormalizeResponse.newBuilder().setNormalizerRan(master.normalizeRegions()).build();
+      final NormalizeTableFilterParams ntfp = new NormalizeTableFilterParams.Builder()
+        .tableNames(ProtobufUtil.toTableNameList(request.getTableNamesList()))
+        .regex(request.hasRegex() ? request.getRegex() : null)
+        .namespace(request.hasNamespace() ? request.getNamespace() : null)
+        .build();
+      return NormalizeResponse.newBuilder()
+        .setNormalizerRan(master.normalizeRegions(ntfp))
+        .build();
     } catch (IOException ex) {
       throw new ServiceException(ex);
     }

TestSimpleRegionNormalizerOnCluster.java

@@ -22,6 +22,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import java.io.IOException;
+import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.concurrent.TimeUnit;

@@ -36,6 +37,7 @@ import org.apache.hadoop.hbase.Size;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter.ExplainingPredicate;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.Table;
@@ -94,6 +96,9 @@ public class TestSimpleRegionNormalizerOnCluster {
     // no way for the test to set the regionId on a created region, so disable this feature.
     TEST_UTIL.getConfiguration().setInt("hbase.normalizer.merge.min_region_age.days", 0);

+    // disable the normalizer coming along and running via Chore
+    TEST_UTIL.getConfiguration().setInt("hbase.normalizer.period", Integer.MAX_VALUE);
+
     TEST_UTIL.startMiniCluster(1);
     TestNamespaceAuditor.waitForQuotaInitialize(TEST_UTIL);
     admin = TEST_UTIL.getAdmin();
@@ -107,13 +112,13 @@ public class TestSimpleRegionNormalizerOnCluster {
   }

   @Before
-  public void before() throws IOException {
+  public void before() throws Exception {
     // disable the normalizer ahead of time, let the test enable it when its ready.
     admin.normalizerSwitch(false);
   }

   @Test
-  public void testHonorsNormalizerSwitch() throws IOException {
+  public void testHonorsNormalizerSwitch() throws Exception {
     assertFalse(admin.isNormalizerEnabled());
     assertFalse(admin.normalize());
     assertFalse(admin.normalizerSwitch(true));
@@ -220,14 +225,103 @@ public class TestSimpleRegionNormalizerOnCluster {
     }
   }

+  @Test
+  public void testHonorsNamespaceFilter() throws Exception {
+    final NamespaceDescriptor namespaceDescriptor = NamespaceDescriptor.create("ns").build();
+    final TableName tn1 = TableName.valueOf("ns", name.getMethodName());
+    final TableName tn2 = TableName.valueOf(name.getMethodName());
+
+    try {
+      admin.createNamespace(namespaceDescriptor);
+      final int tn1RegionCount = createTableBegsSplit(tn1, true, false);
+      final int tn2RegionCount = createTableBegsSplit(tn2, true, false);
+      final NormalizeTableFilterParams ntfp = new NormalizeTableFilterParams.Builder()
+        .namespace("ns")
+        .build();
+
+      assertFalse(admin.normalizerSwitch(true));
+      assertTrue(admin.normalize(ntfp));
+      waitForTableSplit(tn1, tn1RegionCount + 1);
+
+      // confirm that tn1 has (tn1RegionCount + 1) number of regions.
+      // tn2 has tn2RegionCount number of regions because it's not a member of the target namespace.
+      assertEquals(
+        tn1 + " should have split.",
+        tn1RegionCount + 1,
+        MetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tn1));
+      waitForTableRegionCount(tn2, tn2RegionCount);
+    } finally {
+      dropIfExists(tn1);
+      dropIfExists(tn2);
+    }
+  }
+
+  @Test
+  public void testHonorsPatternFilter() throws Exception {
+    final TableName tn1 = TableName.valueOf(name.getMethodName() + "1");
+    final TableName tn2 = TableName.valueOf(name.getMethodName() + "2");
+
+    try {
+      final int tn1RegionCount = createTableBegsSplit(tn1, true, false);
+      final int tn2RegionCount = createTableBegsSplit(tn2, true, false);
+      final NormalizeTableFilterParams ntfp = new NormalizeTableFilterParams.Builder()
+        .regex(".*[1]")
+        .build();
+
+      assertFalse(admin.normalizerSwitch(true));
+      assertTrue(admin.normalize(ntfp));
+      waitForTableSplit(tn1, tn1RegionCount + 1);
+
+      // confirm that tn1 has (tn1RegionCount + 1) number of regions.
+      // tn2 has tn2RegionCount number of regions because it fails filter.
+      assertEquals(
+        tn1 + " should have split.",
+        tn1RegionCount + 1,
+        MetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tn1));
+      waitForTableRegionCount(tn2, tn2RegionCount);
+    } finally {
+      dropIfExists(tn1);
+      dropIfExists(tn2);
+    }
+  }
+
+  @Test
+  public void testHonorsNameFilter() throws Exception {
+    final TableName tn1 = TableName.valueOf(name.getMethodName() + "1");
+    final TableName tn2 = TableName.valueOf(name.getMethodName() + "2");
+
+    try {
+      final int tn1RegionCount = createTableBegsSplit(tn1, true, false);
+      final int tn2RegionCount = createTableBegsSplit(tn2, true, false);
+      final NormalizeTableFilterParams ntfp = new NormalizeTableFilterParams.Builder()
+        .tableNames(Collections.singletonList(tn1))
+        .build();
+
+      assertFalse(admin.normalizerSwitch(true));
+      assertTrue(admin.normalize(ntfp));
+      waitForTableSplit(tn1, tn1RegionCount + 1);
+
+      // confirm that tn1 has (tn1RegionCount + 1) number of regions.
+      // tn2 has tn2RegionCount number of regions because it fails filter.
+      assertEquals(
+        tn1 + " should have split.",
+        tn1RegionCount + 1,
+        MetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tn1));
+      waitForTableRegionCount(tn2, tn2RegionCount);
+    } finally {
+      dropIfExists(tn1);
+      dropIfExists(tn2);
+    }
+  }
+
-  private static TableName buildTableNameForQuotaTest(final String methodName) throws IOException {
+  private static TableName buildTableNameForQuotaTest(final String methodName) throws Exception {
     String nsp = "np2";
     NamespaceDescriptor nspDesc =
       NamespaceDescriptor.create(nsp)
         .addConfiguration(TableNamespaceManager.KEY_MAX_REGIONS, "5")
         .addConfiguration(TableNamespaceManager.KEY_MAX_TABLES, "2").build();
     admin.createNamespace(nspDesc);
-    return TableName.valueOf(nsp + TableName.NAMESPACE_DELIM + methodName);
+    return TableName.valueOf(nsp, methodName);
   }

   private static void waitForSkippedSplits(final HMaster master,
@@ -347,12 +441,13 @@
    */
   private static int createTableBegsSplit(final TableName tableName,
     final boolean normalizerEnabled, final boolean isMergeEnabled)
-    throws IOException {
+    throws Exception {
     final List<HRegion> generatedRegions = generateTestData(tableName, 1, 1, 2, 3, 5);
     assertEquals(5, MetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tableName));
     admin.flush(tableName);
-    final TableDescriptor td = TableDescriptorBuilder.newBuilder(admin.getDescriptor(tableName))
+    final TableDescriptor td = TableDescriptorBuilder
+      .newBuilder(admin.getDescriptor(tableName))
       .setNormalizationEnabled(normalizerEnabled)
       .setMergeEnabled(isMergeEnabled)
       .build();
@@ -383,13 +478,14 @@
    *   <li>sum of sizes of first two regions < average</li>
    * </ul>
    */
-  private static int createTableBegsMerge(final TableName tableName) throws IOException {
+  private static int createTableBegsMerge(final TableName tableName) throws Exception {
     // create 5 regions with sizes to trigger merge of small regions
     final List<HRegion> generatedRegions = generateTestData(tableName, 1, 1, 3, 3, 5);
     assertEquals(5, MetaTableAccessor.getRegionCount(TEST_UTIL.getConnection(), tableName));
     admin.flush(tableName);
-    final TableDescriptor td = TableDescriptorBuilder.newBuilder(admin.getDescriptor(tableName))
+    final TableDescriptor td = TableDescriptorBuilder
+      .newBuilder(admin.getDescriptor(tableName))
       .setNormalizationEnabled(true)
       .build();
     admin.modifyTable(td);
@@ -411,7 +507,7 @@
     return 5;
   }

-  private static void dropIfExists(final TableName tableName) throws IOException {
+  private static void dropIfExists(final TableName tableName) throws Exception {
     if (tableName != null && admin.tableExists(tableName)) {
       if (admin.isTableEnabled(tableName)) {
         admin.disableTable(tableName);

admin.rb

@@ -234,9 +234,54 @@ module Hbase
     #----------------------------------------------------------------------------------------------
     # Requests region normalization for all configured tables in the cluster
-    # Returns true if normalizer ran successfully
-    def normalize
-      @admin.normalize
+    # Returns true if normalize request was successfully submitted
+    def normalize(*args)
+      builder = org.apache.hadoop.hbase.client.NormalizeTableFilterParams::Builder.new
+      args.each do |arg|
+        unless arg.is_a?(String) || arg.is_a?(Hash)
+          raise(ArgumentError, "#{arg.class} of #{arg.inspect} is not of Hash or String type")
+        end
+
+        if arg.key?(TABLE_NAME)
+          table_name = arg.delete(TABLE_NAME)
+          unless table_name.is_a?(String)
+            raise(ArgumentError, "#{TABLE_NAME} must be of type String")
+          end
+
+          builder.tableNames(java.util.Collections.singletonList(TableName.valueOf(table_name)))
+        elsif arg.key?(TABLE_NAMES)
+          table_names = arg.delete(TABLE_NAMES)
+          unless table_names.is_a?(Array)
+            raise(ArgumentError, "#{TABLE_NAMES} must be of type Array")
+          end
+
+          table_name_list = java.util.LinkedList.new
+          table_names.each do |tn|
+            unless tn.is_a?(String)
+              raise(ArgumentError, "#{TABLE_NAMES} value #{tn} must be of type String")
+            end
+            table_name_list.add(TableName.valueOf(tn))
+          end
+          builder.tableNames(table_name_list)
+        elsif arg.key?(REGEX)
+          regex = arg.delete(REGEX)
+          raise(ArgumentError, "#{REGEX} must be of type String") unless regex.is_a?(String)
+
+          builder.regex(regex)
+        elsif arg.key?(NAMESPACE)
+          namespace = arg.delete(NAMESPACE)
+          unless namespace.is_a?(String)
+            raise(ArgumentError, "#{NAMESPACE} must be of type String")
+          end
+
+          builder.namespace(namespace)
+        else
+          raise(ArgumentError, "Unrecognized argument #{arg}")
+        end
+      end
+      ntfp = builder.build
+      @admin.normalize(ntfp)
     end

     #----------------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------------- #----------------------------------------------------------------------------------------------

hbase_constants.rb

@@ -69,6 +69,7 @@ module HBaseConstants
   POLICY = 'POLICY'.freeze
   RAW = 'RAW'.freeze
   READ_TYPE = 'READ_TYPE'.freeze
+  REGEX = 'REGEX'.freeze
   REGIONSERVER = 'REGIONSERVER'.freeze
   REGION_REPLICATION = 'REGION_REPLICATION'.freeze
   REGION_REPLICA_ID = 'REGION_REPLICA_ID'.freeze

@@ -87,6 +88,8 @@ module HBaseConstants
   STOPROW = 'STOPROW'.freeze
   TABLE = 'TABLE'.freeze
   TABLE_CFS = 'TABLE_CFS'.freeze
+  TABLE_NAME = 'TABLE_NAME'.freeze
+  TABLE_NAMES = 'TABLE_NAMES'.freeze
   TIMERANGE = 'TIMERANGE'.freeze
   TIMESTAMP = 'TIMESTAMP'.freeze
   TYPE = 'TYPE'.freeze

normalize.rb

@@ -22,18 +22,24 @@ module Shell
     class Normalize < Command
       def help
         <<-EOF
-Trigger region normalizer for all tables which have NORMALIZATION_ENABLED flag set. Returns true
-if normalizer ran successfully, false otherwise. Note that this command has no effect
-if region normalizer is disabled (make sure it's turned on using 'normalizer_switch' command).
+Trigger the region normalizer. Without arguments, invokes the normalizer without a table filter.
+Any arguments are used to limit table selection. Returns true if the normalize request was
+submitted successfully, false otherwise. Note that this command has no effect if region normalizer
+is disabled (make sure it's turned on using 'normalizer_switch' command).

 Examples:

   hbase> normalize
+  hbase> normalize TABLE_NAME => 'my_table'
+  hbase> normalize TABLE_NAMES => ['foo', 'bar', 'baz']
+  hbase> normalize REGEX => 'my_.*'
+  hbase> normalize NAMESPACE => 'ns1'
+  hbase> normalize NAMESPACE => 'ns', REGEX => '.*_BIG_.*'
 EOF
       end

-      def command
-        did_normalize_run = !!admin.normalize
+      def command(*args)
+        did_normalize_run = !!admin.normalize(*args)
         formatter.row([did_normalize_run.to_s])
         did_normalize_run
       end

ThriftAdmin.java

@@ -1,5 +1,4 @@
-/**
- *
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information

@@ -46,6 +45,7 @@ import org.apache.hadoop.hbase.client.CompactType;
 import org.apache.hadoop.hbase.client.CompactionState;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.LogQueryFilter;
+import org.apache.hadoop.hbase.client.NormalizeTableFilterParams;
 import org.apache.hadoop.hbase.client.OnlineLogRecord;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.SnapshotDescription;

@@ -796,7 +796,7 @@ public class ThriftAdmin implements Admin {
   }

   @Override
-  public boolean normalize() {
+  public boolean normalize(NormalizeTableFilterParams ntfp) {
     throw new NotImplementedException("normalize not supported in ThriftAdmin");
   }