HBASE-26467 Fix bug for MemStoreLABImpl.forceCopyOfBigCellInto(Cell) (#3858)

Signed-off-by: Duo Zhang <zhangduo@apache.org>
Reviewed-by: chenglei <chenglei@apache.org>
Authored by zhengzhuobinzzb on 2021-11-21 20:59:11 +08:00; committed by Duo Zhang
parent 8e5a3b415f
commit 9b627905c7
3 changed files with 60 additions and 4 deletions
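
In short: MemStoreLABImpl.forceCopyOfBigCellInto(Cell) folded ChunkCreator.SIZEOF_CHUNK_HEADER into the copied cell's size, so the returned cell (and the memstore size derived from it) came out bigger than the original cell. The diff below removes the header from the cell-size math and adjusts the tests accordingly. For orientation, here is a condensed sketch of the property the new tests verify; it reuses the classes and config keys from the diff and assumes it runs inside HBase's own test setup (ChunkCreator already initialized), so it is an illustration, not part of the patch:

    Configuration conf = HBaseConfiguration.create();
    int chunkSize = ChunkCreator.getInstance().getChunkSize();
    conf.setInt(MemStoreLAB.CHUNK_SIZE_KEY, chunkSize);
    conf.setInt(MemStoreLAB.MAX_ALLOC_KEY, chunkSize / 2);
    MemStoreLABImpl mslab = new MemStoreLABImpl(conf);

    // A value as large as a whole chunk forces the "big cell" copy path.
    KeyValue bigKV = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), EnvironmentEdgeManager.currentTime(), new byte[chunkSize]);

    // Before this fix the copied cell reported SIZEOF_CHUNK_HEADER extra bytes;
    // after it, the copy keeps the original serialized size.
    Cell copied = mslab.forceCopyOfBigCellInto(bigKV);
    assertEquals(bigKV.getSerializedSize(), copied.getSerializedSize());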

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/MemStoreLABImpl.java

@@ -124,9 +124,8 @@ public class MemStoreLABImpl implements MemStoreLAB {
   @Override
   public Cell forceCopyOfBigCellInto(Cell cell) {
     int size = Segment.getCellLength(cell);
-    size += ChunkCreator.SIZEOF_CHUNK_HEADER;
     Preconditions.checkArgument(size >= 0, "negative size");
-    if (size <= dataChunkSize) {
+    if (size + ChunkCreator.SIZEOF_CHUNK_HEADER <= dataChunkSize) {
       // Using copyCellInto for cells which are bigger than the original maxAlloc
       return copyCellInto(cell, dataChunkSize);
     } else {

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactingToCellFlatMapMemStore.java

@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -799,7 +800,7 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore
     totalHeapSize = MutableSegment.DEEP_OVERHEAD + CellChunkImmutableSegment.DEEP_OVERHEAD_CCM
         + numOfCells * oneCellOnCCMHeapSize;
-    assertEquals(totalCellsLen+ChunkCreator.SIZEOF_CHUNK_HEADER, regionServicesForStores
+    assertEquals(totalCellsLen, regionServicesForStores
         .getMemStoreSize());
     assertEquals(totalHeapSize, ((CompactingMemStore) memstore).heapSize());
@@ -905,6 +906,31 @@ public class TestCompactingToCellFlatMapMemStore extends TestCompactingMemStore
     }
   }
 
+  /**
+   * Test big cell size after in memory compaction. (HBASE-26467)
+   */
+  @Test
+  public void testBigCellSizeAfterInMemoryCompaction() throws IOException {
+    MemoryCompactionPolicy compactionType = MemoryCompactionPolicy.BASIC;
+    memstore.getConfiguration().setInt(MemStoreCompactionStrategy
+        .COMPACTING_MEMSTORE_THRESHOLD_KEY, 1);
+    memstore.getConfiguration().set(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY,
+        String.valueOf(compactionType));
+    ((MyCompactingMemStore) memstore).initiateType(compactionType, memstore.getConfiguration());
+    byte[] val = new byte[MemStoreLAB.CHUNK_SIZE_DEFAULT];
+
+    long size = addRowsByKeys(memstore, new String[]{"A"}, val);
+    ((MyCompactingMemStore) memstore).flushInMemory();
+
+    for(KeyValueScanner scanner : memstore.getScanners(Long.MAX_VALUE)) {
+      Cell cell;
+      while ((cell = scanner.next()) != null) {
+        assertEquals(size, cell.getSerializedSize());
+      }
+    }
+  }
+
   private long addRowsByKeysDataSize(final AbstractMemStore hmc, String[] keys) {
     byte[] fam = Bytes.toBytes("testfamily");

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMemStoreLAB.java

@@ -16,6 +16,8 @@
  * limitations under the License.
  */
 package org.apache.hadoop.hbase.regionserver;
+import static org.apache.hadoop.hbase.regionserver.MemStoreLAB.CHUNK_SIZE_KEY;
+import static org.apache.hadoop.hbase.regionserver.MemStoreLAB.MAX_ALLOC_KEY;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
@@ -38,6 +40,7 @@ import org.apache.hadoop.hbase.io.util.MemorySizeUtil;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -212,7 +215,7 @@ public class TestMemStoreLAB {
     Configuration conf = HBaseConfiguration.create();
     conf.setDouble(MemStoreLAB.CHUNK_POOL_MAXSIZE_KEY, 0.1);
     // set chunk size to default max alloc size, so we could easily trigger chunk retirement
-    conf.setLong(MemStoreLABImpl.CHUNK_SIZE_KEY, MemStoreLABImpl.MAX_ALLOC_DEFAULT);
+    conf.setLong(CHUNK_SIZE_KEY, MemStoreLABImpl.MAX_ALLOC_DEFAULT);
     // reconstruct mslab
     long globalMemStoreLimit = (long) (ManagementFactory.getMemoryMXBean().getHeapMemoryUsage()
         .getMax() * MemorySizeUtil.getGlobalMemStoreHeapPercent(conf, false));
@@ -266,6 +269,34 @@ public class TestMemStoreLAB {
     }
   }
 
+  /**
+   * Test cell with right length, which constructed by testForceCopyOfBigCellInto. (HBASE-26467)
+   */
+  @Test
+  public void testForceCopyOfBigCellInto() {
+    Configuration conf = HBaseConfiguration.create();
+    int chunkSize = ChunkCreator.getInstance().getChunkSize();
+    conf.setInt(CHUNK_SIZE_KEY, chunkSize);
+    conf.setInt(MAX_ALLOC_KEY, chunkSize / 2);
+
+    MemStoreLABImpl mslab = new MemStoreLABImpl(conf);
+    byte[] row = Bytes.toBytes("row");
+    byte[] columnFamily = Bytes.toBytes("columnFamily");
+    byte[] qualify = Bytes.toBytes("qualify");
+    byte[] smallValue = new byte[chunkSize / 2];
+    byte[] bigValue = new byte[chunkSize];
+    KeyValue smallKV = new KeyValue(row, columnFamily, qualify, EnvironmentEdgeManager
+        .currentTime(), smallValue);
+
+    assertEquals(smallKV.getSerializedSize(),
+      mslab.forceCopyOfBigCellInto(smallKV).getSerializedSize());
+
+    KeyValue bigKV = new KeyValue(row, columnFamily, qualify, EnvironmentEdgeManager
+        .currentTime(), bigValue);
+    assertEquals(bigKV.getSerializedSize(),
+      mslab.forceCopyOfBigCellInto(bigKV).getSerializedSize());
+  }
+
   private Thread getChunkQueueTestThread(final MemStoreLABImpl mslab, String threadName,
       Cell cellToCopyInto) {
     Thread thread = new Thread() {