HBASE-26777 BufferedDataBlockEncoder$OffheapDecodedExtendedCell.deepC… (#4139)

Signed-off-by: Andrew Purtell <apurtell@apache.org>
This commit is contained in:
Istvan Toth 2022-03-04 22:49:19 +01:00 committed by GitHub
parent f4b05a8364
commit 7d2457e075
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 24 additions and 5 deletions

View File

@@ -131,6 +131,8 @@ public class KeyValueUtil {
//Cell#getSerializedSize returns the serialized size of the Source cell, which may //Cell#getSerializedSize returns the serialized size of the Source cell, which may
//not serialize all fields. We are constructing a KeyValue backing array here, //not serialize all fields. We are constructing a KeyValue backing array here,
//which does include all fields, and must allocate accordingly. //which does include all fields, and must allocate accordingly.
//TODO we could probably use Cell#getSerializedSize safely, the errors were
//caused by cells corrupted by use-after-free bugs
int v1Length = length(cell.getRowLength(), cell.getFamilyLength(), int v1Length = length(cell.getRowLength(), cell.getFamilyLength(),
cell.getQualifierLength(), cell.getValueLength(), cell.getTagsLength(), true); cell.getQualifierLength(), cell.getValueLength(), cell.getTagsLength(), true);
byte[] backingBytes = new byte[v1Length]; byte[] backingBytes = new byte[v1Length];

View File

@@ -94,6 +94,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode; import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.MetaCellComparator; import org.apache.hadoop.hbase.MetaCellComparator;
import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.NotServingRegionException;
@@ -7871,7 +7872,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
// See more details in HBASE-26036. // See more details in HBASE-26036.
for (Cell cell : tmp) { for (Cell cell : tmp) {
results.add(cell instanceof ByteBufferExtendedCell ? results.add(cell instanceof ByteBufferExtendedCell ?
((ByteBufferExtendedCell) cell).deepClone(): cell); KeyValueUtil.copyToNewKeyValue(cell) : cell);
} }
} }

View File

@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.io.ByteBuffAllocator; import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.DeallocateRewriteByteBuffAllocator; import org.apache.hadoop.hbase.io.DeallocateRewriteByteBuffAllocator;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory; import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem; import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
@@ -87,8 +88,20 @@ public class TestCheckAndMutateWithByteBuff {
} }
@Test @Test
public void testCheckAndMutateWithByteBuff() throws Exception { public void testCheckAndMutateWithByteBuffNoEncode() throws Exception {
Table testTable = createTable(TableName.valueOf(name.getMethodName())); testCheckAndMutateWithByteBuff(TableName.valueOf(name.getMethodName()), DataBlockEncoding.NONE);
}
@Test
public void testCheckAndMutateWithByteBuffEncode() throws Exception {
// Tests for HBASE-26777.
// As most HBase.getRegion() calls have been factored out from HBase, you'd need to revert
// both HBASE-26777, and the HBase.get() replacements from HBASE-26036 for this test to fail
testCheckAndMutateWithByteBuff(TableName.valueOf(name.getMethodName()), DataBlockEncoding.FAST_DIFF);
}
private void testCheckAndMutateWithByteBuff(TableName tableName, DataBlockEncoding dbe) throws Exception {
Table testTable = createTable(tableName, dbe);
byte[] checkRow = Bytes.toBytes("checkRow"); byte[] checkRow = Bytes.toBytes("checkRow");
byte[] checkQualifier = Bytes.toBytes("cq"); byte[] checkQualifier = Bytes.toBytes("cq");
byte[] checkValue = Bytes.toBytes("checkValue"); byte[] checkValue = Bytes.toBytes("checkValue");
@@ -104,10 +117,13 @@ public class TestCheckAndMutateWithByteBuff {
Bytes.toBytes("testValue")))); Bytes.toBytes("testValue"))));
} }
private Table createTable(TableName tableName) private Table createTable(TableName tableName, DataBlockEncoding dbe)
throws IOException { throws IOException {
TableDescriptor td = TableDescriptorBuilder.newBuilder(tableName) TableDescriptor td = TableDescriptorBuilder.newBuilder(tableName)
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF).setBlocksize(100).build()) .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF)
.setBlocksize(100)
.setDataBlockEncoding(dbe)
.build())
.build(); .build();
return TEST_UTIL.createTable(td, null); return TEST_UTIL.createTable(td, null);
} }