HBASE-26261 Store configuration loss when use update_config (#3664)

Signed-off-by: Duo Zhang <zhangduo@apache.org>

parent d75d73bcc7
commit 6f68d2765c
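Summary of the change, as recoverable from the diff below: when a region server's configuration was reloaded online (the HBase shell's update_config command, which fires ConfigurationManager.notifyAllObservers), HStore.onConfigurationChange() rebuilt the store configuration from only the incoming global Configuration plus the column family's metadata, so the table-descriptor values and the per-family configuration map that the HStore constructor had layered in were lost. The fix moves the full composition into a shared helper, StoreUtils.createStoreConfiguration(), and calls it from both the constructor and onConfigurationChange().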
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java

@@ -69,6 +69,7 @@ import org.apache.hadoop.hbase.backup.FailedArchiveException;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.conf.ConfigurationManager;
 import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
 import org.apache.hadoop.hbase.coprocessor.ReadOnlyConfiguration;
@@ -246,14 +247,7 @@ public class HStore implements Store, HeapSize, StoreConfigInformation,
   protected HStore(final HRegion region, final ColumnFamilyDescriptor family,
       final Configuration confParam, boolean warmup) throws IOException {

-    // 'conf' renamed to 'confParam' b/c we use this.conf in the constructor
-    // CompoundConfiguration will look for keys in reverse order of addition, so we'd
-    // add global config first, then table and cf overrides, then cf metadata.
-    this.conf = new CompoundConfiguration()
-      .add(confParam)
-      .addBytesMap(region.getTableDescriptor().getValues())
-      .addStringMap(family.getConfiguration())
-      .addBytesMap(family.getValues());
+    this.conf = StoreUtils.createStoreConfiguration(confParam, region.getTableDescriptor(), family);

     this.region = region;
     this.storeContext = initializeStoreContext(family);
@@ -2622,14 +2616,10 @@ public class HStore implements Store, HeapSize, StoreConfigInformation,
     return this.offPeakHours;
   }

-  /**
-   * {@inheritDoc}
-   */
   @Override
   public void onConfigurationChange(Configuration conf) {
-    this.conf = new CompoundConfiguration()
-      .add(conf)
-      .addBytesMap(getColumnFamilyDescriptor().getValues());
+    this.conf = StoreUtils.createStoreConfiguration(conf, region.getTableDescriptor(),
+      getColumnFamilyDescriptor());
     this.storeEngine.compactionPolicy.setConf(conf);
     this.offPeakHours = OffPeakHours.getInstance(conf);
   }
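This hunk is the heart of the fix: the old onConfigurationChange() stacked only conf and getColumnFamilyDescriptor().getValues(), silently dropping the table-level overrides and the family's configuration map. Delegating to the same helper the constructor uses (added in StoreUtils below, with a sketch of its precedence after that hunk) keeps the two paths identical.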
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreUtils.java

@@ -28,7 +28,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CompoundConfiguration;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -161,4 +164,11 @@ public class StoreUtils {
       HFile.DEFAULT_BYTES_PER_CHECKSUM);
   }

+  public static Configuration createStoreConfiguration(Configuration conf, TableDescriptor td,
+      ColumnFamilyDescriptor cfd) {
+    // CompoundConfiguration will look for keys in reverse order of addition, so we'd
+    // add global config first, then table and cf overrides, then cf metadata.
+    return new CompoundConfiguration().add(conf).addBytesMap(td.getValues())
+      .addStringMap(cfd.getConfiguration()).addBytesMap(cfd.getValues());
+  }
 }
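For illustration, a minimal sketch of the precedence this helper establishes (not part of the patch; the class name, table/family names, and the property used here are arbitrary choices). CompoundConfiguration resolves keys in reverse order of addition, so a family-level setting wins over a table-level one, which wins over the global value:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.StoreUtils;
import org.apache.hadoop.hbase.util.Bytes;

public class StoreConfLayeringDemo {
  public static void main(String[] args) {
    // Global (site) configuration: blocking store files = 16.
    Configuration global = HBaseConfiguration.create();
    global.setInt("hbase.hstore.blockingStoreFiles", 16);

    // Table-level override: 24.
    TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("demo"))
      .setValue("hbase.hstore.blockingStoreFiles", "24")
      .build();

    // Family-level override: 32 (added last, so checked first).
    ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
      .setConfiguration("hbase.hstore.blockingStoreFiles", "32")
      .build();

    Configuration storeConf = StoreUtils.createStoreConfiguration(global, td, cfd);
    // Prints 32: the family-level setting wins, then table, then global.
    System.out.println(storeConf.getInt("hbase.hstore.blockingStoreFiles", -1));
  }
}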
hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerOnlineConfigChange.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -68,13 +69,16 @@ public class TestRegionServerOnlineConfigChange {
   private final static String columnFamily1Str = "columnFamily1";
   private final static TableName TABLE1 = TableName.valueOf(table1Str);
   private final static byte[] COLUMN_FAMILY1 = Bytes.toBytes(columnFamily1Str);
+  private final static long MAX_FILE_SIZE = 20 * 1024 * 1024L;


   @BeforeClass
   public static void setUp() throws Exception {
     conf = hbaseTestingUtility.getConfiguration();
     hbaseTestingUtility.startMiniCluster();
-    t1 = hbaseTestingUtility.createTable(TABLE1, COLUMN_FAMILY1);
+    t1 = hbaseTestingUtility.createTable(
+      TableDescriptorBuilder.newBuilder(TABLE1).setMaxFileSize(MAX_FILE_SIZE).build(),
+      new byte[][] { COLUMN_FAMILY1 }, conf);
     try (RegionLocator locator = hbaseTestingUtility.getConnection().getRegionLocator(TABLE1)) {
       HRegionInfo firstHRI = locator.getAllRegionLocations().get(0).getRegionInfo();
       r1name = firstHRI.getRegionName();
@@ -225,4 +229,12 @@ public class TestRegionServerOnlineConfigChange {
     assertEquals(newMajorCompactionJitter,
       hstore.getStoreEngine().getCompactionPolicy().getConf().getMajorCompactionJitter(), 0.00001);
   }
+
+  @Test
+  public void testStoreConfigurationOnlineChange() {
+    rs1.getConfigurationManager().notifyAllObservers(conf);
+    long actualMaxFileSize = r1.getStore(COLUMN_FAMILY1).getReadOnlyConfiguration()
+      .getLong(TableDescriptorBuilder.MAX_FILESIZE, -1);
+    assertEquals(MAX_FILE_SIZE, actualMaxFileSize);
+  }
 }
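The new test creates TABLE1 with a table-level MAX_FILESIZE of 20 MB, pushes a configuration-change notification through the region server's ConfigurationManager (the same observer path the shell's update_config command exercises), and asserts that the store's read-only configuration still reports the table-level value rather than the -1 fallback, which is what the old onConfigurationChange() would have produced.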