HBASE-23581 Creating table gets stuck when specifying an invalid split policy as METADATA (#942)

Signed-off-by: Lijin Bin <binlijin@apache.org>
Signed-off-by: Anoop Sam John <anoopsamjohn@apache.org>
Signed-off-by: Xu Cang <xucang@apache.org>
Toshihiro Suzuki 2019-12-24 20:16:31 +09:00 committed by binlijin
parent fc15ea7546
commit 00bb0f20d4
3 changed files with 18 additions and 26 deletions
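
For context, a minimal client-side sketch of the scenario this commit addresses (the table name "repro", family "cf", and class name are illustrative, not from the patch): a descriptor carrying an unloadable split policy class set through table metadata could previously pass creation and leave the region stuck in a failing open loop; after the patch, the master-side sanity check rejects it up front.

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class InvalidSplitPolicyRepro {
  public static void main(String[] args) throws IOException {
    try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
        Admin admin = conn.getAdmin()) {
      // Split policy set through table metadata, pointing at a class
      // that no region server can load.
      admin.createTable(TableDescriptorBuilder.newBuilder(TableName.valueOf("repro"))
          .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
          .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, "nonexisting.foo.class")
          .build());
      // Before this patch: createTable gets stuck while the region fails to open.
      // After: the master-side sanity check throws, and creation fails fast.
    }
  }
}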

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java

@@ -169,6 +169,7 @@ import org.apache.hadoop.hbase.util.HashedBytes;
 import org.apache.hadoop.hbase.util.NonceKey;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
+import org.apache.hadoop.hbase.util.TableDescriptorChecker;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.apache.hadoop.hbase.wal.WALEdit;
@@ -7377,14 +7378,14 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
       throws IOException {
     try {
       // Refuse to open the region if we are missing local compression support
-      checkCompressionCodecs();
+      TableDescriptorChecker.checkCompression(htableDescriptor);
       // Refuse to open the region if encryption configuration is incorrect or
       // codec support is missing
       LOG.debug("checking encryption for " + this.getRegionInfo().getEncodedName());
-      checkEncryption();
+      TableDescriptorChecker.checkEncryption(conf, htableDescriptor);
       // Refuse to open the region if a required class cannot be loaded
       LOG.debug("checking classloading for " + this.getRegionInfo().getEncodedName());
-      checkClassLoading();
+      TableDescriptorChecker.checkClassLoading(conf, htableDescriptor);
       this.openSeqNum = initialize(reporter);
       this.mvcc.advanceTo(openSeqNum);
       // The openSeqNum must be increased every time when a region is assigned, as we rely on it to
@@ -7454,25 +7455,6 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
     r.initializeWarmup(reporter);
   }
 
-  private void checkCompressionCodecs() throws IOException {
-    for (ColumnFamilyDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
-      CompressionTest.testCompression(fam.getCompressionType());
-      CompressionTest.testCompression(fam.getCompactionCompressionType());
-    }
-  }
-
-  private void checkEncryption() throws IOException {
-    for (ColumnFamilyDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
-      EncryptionTest.testEncryption(conf, fam.getEncryptionType(), fam.getEncryptionKey());
-    }
-  }
-
-  private void checkClassLoading() throws IOException {
-    RegionSplitPolicy.getSplitPolicyClass(this.htableDescriptor, conf);
-    RegionCoprocessorHost.testTableCoprocessorAttrs(conf, this.htableDescriptor);
-  }
-
   /**
    * Computes the Path of the HRegion
    *
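
The net effect of the HRegion change above is that region open and master-side validation now share one implementation. A small sketch of invoking the now-public check directly, under the same assumptions as the sketch above (illustrative names; note TableDescriptorChecker is a server-internal class, shown here for illustration only):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.TableDescriptorChecker;

public class SplitPolicyCheckSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("t"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
        .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, "nonexisting.foo.class")
        .build();
    try {
      TableDescriptorChecker.checkClassLoading(conf, td);
    } catch (IOException e) {
      // The unloadable split policy class is reported here, at validation
      // time, instead of surfacing as a region that never finishes opening.
    }
  }
}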

hbase-server/src/main/java/org/apache/hadoop/hbase/util/TableDescriptorChecker.java

@@ -63,8 +63,12 @@ public final class TableDescriptorChecker {
    * Checks whether the table conforms to some sane limits, and configured
    * values (compression, etc) work. Throws an exception if something is wrong.
    */
-  public static void sanityCheck(final Configuration conf, final TableDescriptor td)
+  public static void sanityCheck(final Configuration c, final TableDescriptor td)
       throws IOException {
+    CompoundConfiguration conf = new CompoundConfiguration()
+      .add(c)
+      .addBytesMap(td.getValues());
+
     // Setting this to true logs the warning instead of throwing exception
     boolean logWarn = false;
     if (!conf.getBoolean(TABLE_SANITY_CHECKS, DEFAULT_TABLE_SANITY_CHECKS)) {
@@ -276,21 +280,21 @@ public final class TableDescriptorChecker {
     }
   }
 
-  private static void checkCompression(final TableDescriptor td) throws IOException {
+  public static void checkCompression(final TableDescriptor td) throws IOException {
     for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
       CompressionTest.testCompression(cfd.getCompressionType());
       CompressionTest.testCompression(cfd.getCompactionCompressionType());
     }
   }
 
-  private static void checkEncryption(final Configuration conf, final TableDescriptor td)
+  public static void checkEncryption(final Configuration conf, final TableDescriptor td)
       throws IOException {
     for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
       EncryptionTest.testEncryption(conf, cfd.getEncryptionType(), cfd.getEncryptionKey());
     }
   }
 
-  private static void checkClassLoading(final Configuration conf, final TableDescriptor td)
+  public static void checkClassLoading(final Configuration conf, final TableDescriptor td)
       throws IOException {
     RegionSplitPolicy.getSplitPolicyClass(td, conf);
     RegionCoprocessorHost.testTableCoprocessorAttrs(conf, td);
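
The CompoundConfiguration added to sanityCheck above layers the descriptor's own key/value metadata over the server configuration, so a per-table setting wins over the site-wide one (later additions to a CompoundConfiguration take precedence, which is why the descriptor values are added last). A hedged sketch of that lookup, assuming the TABLE_SANITY_CHECKS constant is accessible from outside the class (otherwise use the literal "hbase.table.sanity.checks"):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.TableDescriptorChecker;

public class SanityCheckOverlaySketch {
  static boolean sanityChecksEnabled(Configuration c, TableDescriptor td) {
    // A value stored in the table descriptor overrides the same key
    // from hbase-site.xml, because it is added after the base config.
    CompoundConfiguration conf = new CompoundConfiguration()
        .add(c)
        .addBytesMap(td.getValues());
    return conf.getBoolean(TableDescriptorChecker.TABLE_SANITY_CHECKS, true);
  }
}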

hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestIllegalTableDescriptor.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -112,6 +113,11 @@ public class TestIllegalTableDescriptor {
     htd.setRegionSplitPolicyClassName(null);
     checkTableIsLegal(htd);
 
+    htd.setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY, "nonexisting.foo.class");
+    checkTableIsIllegal(htd);
+    htd.remove(HConstants.HBASE_REGION_SPLIT_POLICY_KEY);
+    checkTableIsLegal(htd);
+
     hcd.setBlocksize(0);
     checkTableIsIllegal(htd);
     hcd.setBlocksize(1024 * 1024 * 128); // 128M
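
The four added test lines assert both directions: a descriptor whose metadata names an unresolvable split policy class must be rejected, and removing the key makes it legal again. For reference, a hedged sketch of what a helper like checkTableIsIllegal typically does in this test (the real body is outside this diff, so this is a guess at its shape, not the actual code):

  private void checkTableIsIllegal(HTableDescriptor htd) throws IOException {
    Admin admin = TEST_UTIL.getAdmin();
    try {
      admin.createTable(htd);
      fail("Table creation should have failed the descriptor sanity check");
    } catch (Exception e) {
      // expected: sanityCheck rejects the descriptor before any region opens
    }
  }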