HBASE-26777 BufferedDataBlockEncoder$OffheapDecodedExtendedCell.deepC… (#4139)

Signed-off-by: Andrew Purtell <apurtell@apache.org>
Istvan Toth 2022-03-04 22:49:19 +01:00 committed by Andrew Purtell
parent bed30b153b
commit eb1c57d12e
3 changed files with 24 additions and 5 deletions

hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java

@@ -131,6 +131,8 @@ public class KeyValueUtil {
     //Cell#getSerializedSize returns the serialized size of the Source cell, which may
     //not serialize all fields. We are constructing a KeyValue backing array here,
     //which does include all fields, and must allocate accordingly.
+    //TODO we could probably use Cell#getSerializedSize safely, the errors were
+    //caused by cells corrupted by use-after-free bugs
     int v1Length = length(cell.getRowLength(), cell.getFamilyLength(),
       cell.getQualifierLength(), cell.getValueLength(), cell.getTagsLength(), true);
     byte[] backingBytes = new byte[v1Length];
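
The comment above carries the reasoning for the allocation strategy: for a decoded off-heap cell, Cell#getSerializedSize may not account for every field of the KeyValue v1 layout, so copyToNewKeyValue sizes the backing array from the individual field lengths instead. A minimal sketch contrasting the two size sources, assuming an HBase 2.x classpath; the demo class and its output lines are illustrative, not part of the commit:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.util.Bytes;

// Illustrative demo, not part of the commit.
public class KeyValueLengthDemo {
  public static void main(String[] args) {
    // For a plain on-heap KeyValue the two sizes agree; the field-by-field
    // length computed in copyToNewKeyValue matters for decoded off-heap
    // cells, whose serialized form may omit fields.
    Cell cell = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("cf"),
      Bytes.toBytes("q"), Bytes.toBytes("value"));
    System.out.println("Cell#getSerializedSize: " + cell.getSerializedSize());
    System.out.println("copied KeyValue length: "
      + KeyValueUtil.copyToNewKeyValue(cell).getLength());
  }
}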

hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java

@@ -97,6 +97,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.MetaCellComparator;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.NotServingRegionException;
@@ -7585,7 +7586,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
         // See more details in HBASE-26036.
         for (Cell cell : tmp) {
           results.add(cell instanceof ByteBufferExtendedCell ?
-            ((ByteBufferExtendedCell) cell).deepClone(): cell);
+            KeyValueUtil.copyToNewKeyValue(cell) : cell);
         }
       }
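
This one-line swap is the substance of the fix. deepClone() on BufferedDataBlockEncoder$OffheapDecodedExtendedCell could hand back a cell that still referenced a pooled off-heap buffer, so once the backing block was returned to the allocator the "clone" read recycled memory. KeyValueUtil.copyToNewKeyValue materializes the cell into a fresh on-heap byte array instead. The same defensive-copy pattern as a standalone sketch, with illustrative class and method names:

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.ByteBufferExtendedCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;

// Illustrative helper, not part of the commit.
public final class CellCopyUtil {
  private CellCopyUtil() {
  }

  // Copy buffer-backed cells to on-heap KeyValues so the results stay valid
  // after the underlying data block is evicted or its buffer is recycled.
  public static List<Cell> toHeap(List<Cell> cells) {
    List<Cell> out = new ArrayList<>(cells.size());
    for (Cell cell : cells) {
      out.add(cell instanceof ByteBufferExtendedCell
        ? KeyValueUtil.copyToNewKeyValue(cell) : cell);
    }
    return out;
  }
}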

hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutateWithByteBuff.java

@@ -31,6 +31,7 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.ByteBuffAllocator;
 import org.apache.hadoop.hbase.io.DeallocateRewriteByteBuffAllocator;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
@@ -86,8 +87,20 @@ public class TestCheckAndMutateWithByteBuff {
   }
 
   @Test
-  public void testCheckAndMutateWithByteBuff() throws Exception {
-    Table testTable = createTable(TableName.valueOf(name.getMethodName()));
+  public void testCheckAndMutateWithByteBuffNoEncode() throws Exception {
+    testCheckAndMutateWithByteBuff(TableName.valueOf(name.getMethodName()), DataBlockEncoding.NONE);
+  }
+
+  @Test
+  public void testCheckAndMutateWithByteBuffEncode() throws Exception {
+    // Tests for HBASE-26777.
+    // As most HBase.getRegion() calls have been factored out from HBase, you'd need to revert
+    // both HBASE-26777, and the HBase.get() replacements from HBASE-26036 for this test to fail
+    testCheckAndMutateWithByteBuff(TableName.valueOf(name.getMethodName()), DataBlockEncoding.FAST_DIFF);
+  }
+
+  private void testCheckAndMutateWithByteBuff(TableName tableName, DataBlockEncoding dbe) throws Exception {
+    Table testTable = createTable(tableName, dbe);
     byte[] checkRow = Bytes.toBytes("checkRow");
     byte[] checkQualifier = Bytes.toBytes("cq");
     byte[] checkValue = Bytes.toBytes("checkValue");
@@ -103,10 +116,13 @@
       Bytes.toBytes("testValue"))));
   }
 
-  private Table createTable(TableName tableName)
+  private Table createTable(TableName tableName, DataBlockEncoding dbe)
     throws IOException {
     TableDescriptor td = TableDescriptorBuilder.newBuilder(tableName)
-      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF).setBlocksize(100).build())
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF)
+        .setBlocksize(100)
+        .setDataBlockEncoding(dbe)
+        .build())
       .build();
     return TEST_UTIL.createTable(td, null);
   }
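
One design note on the test setup: the import of DeallocateRewriteByteBuffAllocator above is what makes the use-after-free observable, because that allocator overwrites each buffer as it is returned to the pool, so a cell still pointing into freed memory reads garbage instead of coincidentally intact stale bytes. A sketch of how such an allocator can be wired into a mini-cluster, assuming HBaseTestingUtility and the ByteBuffAllocator configuration constants named below (the allocator-class key in particular is an assumption to check against your HBase version):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.DeallocateRewriteByteBuffAllocator;

// Illustrative setup sketch, not part of the commit; constant names are
// assumptions to verify against the HBase version in use.
public class AllocatorSetupSketch {
  public static void main(String[] args) throws Exception {
    HBaseTestingUtility testUtil = new HBaseTestingUtility();
    Configuration conf = testUtil.getConfiguration();
    // Serve reads from pooled, recyclable off-heap buffers.
    conf.setBoolean(ByteBuffAllocator.ALLOCATOR_POOL_ENABLED_KEY, true);
    // Overwrite buffers on deallocation so any cell that still points into
    // freed memory fails loudly rather than reading stale data.
    conf.set(ByteBuffAllocator.BYTEBUFF_ALLOCATOR_CLASS,
      DeallocateRewriteByteBuffAllocator.class.getName());
    testUtil.startMiniCluster();
  }
}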