HBASE-21047 Object creation of StoreFileScanner thru constructor and close may leave refCount to -1 (Vishal Khandelwal)

Andrew Purtell 2018-08-16 11:42:15 -07:00
parent 145c92f3d6
commit b49941012a
3 changed files with 44 additions and 7 deletions
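Per the commit title, building a StoreFileScanner directly through its constructor and then closing it could leave the reader's refCount at -1: before this patch the increment lived only in StoreFileReader.getStoreFileScanner() (removed in the first hunk below), while the decrement is tied to the scanner finishing its read. The toy sketch below reproduces that imbalance; the class and method names are hypothetical stand-ins, not the HBase classes.

import java.util.concurrent.atomic.AtomicInteger;

// Toy stand-ins mirroring the old placement of the increment:
// only the factory method bumps the count, but closing the scanner always drops it.
class LeakyReader {
  final AtomicInteger refCount = new AtomicInteger(0);

  LeakyScanner getScanner() {
    refCount.incrementAndGet();          // the increment only happens on this path
    return new LeakyScanner(this);
  }
}

class LeakyScanner {
  private final LeakyReader reader;

  LeakyScanner(LeakyReader reader) {     // direct construction skips the increment
    this.reader = reader;
  }

  void close() {
    reader.refCount.decrementAndGet();   // close() always decrements
  }
}

public class RefCountImbalanceDemo {
  public static void main(String[] args) {
    LeakyReader reader = new LeakyReader();
    new LeakyScanner(reader).close();           // construct directly, then close
    System.out.println(reader.refCount.get());  // prints -1: the imbalance this commit fixes
  }
}

Per the javadoc in the hunks below, the count is what keeps the reader open while something is still reading from it, so a negative count misrepresents whether the file is in use.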

StoreFileReader.java

@@ -148,12 +148,18 @@ public class StoreFileReader {
    */
   public StoreFileScanner getStoreFileScanner(boolean cacheBlocks, boolean pread,
       boolean isCompaction, long readPt, long scannerOrder, boolean canOptimizeForNonNullColumn) {
-    // Increment the ref count
-    refCount.incrementAndGet();
     return new StoreFileScanner(this, getScanner(cacheBlocks, pread, isCompaction),
       !isCompaction, reader.hasMVCCInfo(), readPt, scannerOrder, canOptimizeForNonNullColumn);
   }
 
+  /**
+   * Indicate that the scanner has started reading with this reader. We need to increment the ref
+   * count so reader is not close until some object is holding the lock
+   */
+  void incrementRefCount() {
+    refCount.incrementAndGet();
+  }
+
   /**
    * Indicate that the scanner has finished reading with this reader. We need to decrement the ref
    * count, and also, if this is not the common pread reader, we should close it.
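The new incrementRefCount() javadoc above says the count exists so the reader is not closed while a scanner is still holding it; the decrement counterpart whose javadoc ends the hunk releases that hold. The sketch below illustrates, with hypothetical names rather than the HBase implementation, the kind of guard such a count enables: the underlying resource is released only once nothing references it.

import java.util.concurrent.atomic.AtomicInteger;

// Illustrative only: a close request is honored only when no scanner holds a reference.
class CountedReader {
  private final AtomicInteger refCount = new AtomicInteger(0);
  private volatile boolean closed = false;

  void incrementRefCount() {
    refCount.incrementAndGet();
  }

  void decrementRefCount() {
    refCount.decrementAndGet();
  }

  boolean isReferenced() {
    return refCount.get() > 0;
  }

  /** Close the underlying resource only if no reader still references it. */
  synchronized boolean closeIfUnreferenced() {
    if (isReferenced()) {
      return false;  // a scanner is still reading; defer the close
    }
    closed = true;   // stand-in for closing the underlying file reader
    return true;
  }
}

public class RefCountGateDemo {
  public static void main(String[] args) {
    CountedReader reader = new CountedReader();
    reader.incrementRefCount();                        // a scanner starts reading
    System.out.println(reader.closeIfUnreferenced());  // false: close is deferred
    reader.decrementRefCount();                        // the scanner finishes
    System.out.println(reader.closeIfUnreferenced());  // true: safe to close now
  }
}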

StoreFileScanner.java

@@ -94,6 +94,7 @@ public class StoreFileScanner implements KeyValueScanner {
     this.hasMVCCInfo = hasMVCC;
     this.scannerOrder = scannerOrder;
     this.canOptimizeForNonNullColumn = canOptimizeForNonNullColumn;
+    this.reader.incrementRefCount();
   }
 
   boolean isPrimaryReplica() {
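With the increment now inside the StoreFileScanner constructor (hunk above) and the decrement tied to finishing the read, the reference is held for exactly the scanner's lifetime, whether the scanner came from getStoreFileScanner() or was constructed directly. A minimal sketch of that ownership shape follows; the names are hypothetical, and the closed flag guarding against a double decrement is an illustrative assumption, not necessarily how StoreFileScanner guards it.

import java.util.concurrent.atomic.AtomicInteger;

// The scanner takes a reference in its constructor and releases it exactly once in close(),
// so the count stays balanced on every construction path.
class SimpleReader {
  final AtomicInteger refCount = new AtomicInteger(0);

  void incrementRefCount() {
    refCount.incrementAndGet();
  }

  void decrementRefCount() {
    refCount.decrementAndGet();
  }
}

class BalancedScanner implements AutoCloseable {
  private final SimpleReader reader;
  private boolean closed = false;

  BalancedScanner(SimpleReader reader) {
    this.reader = reader;
    reader.incrementRefCount();      // the reference is taken at construction time
  }

  @Override
  public void close() {
    if (!closed) {                   // illustrative guard against a double decrement
      closed = true;
      reader.decrementRefCount();
    }
  }
}

public class BalancedRefCountDemo {
  public static void main(String[] args) {
    SimpleReader reader = new SimpleReader();
    try (BalancedScanner scanner = new BalancedScanner(reader)) {
      System.out.println(reader.refCount.get());  // 1 while the scanner is open
    }
    System.out.println(reader.refCount.get());    // 0 after close(), never -1
  }
}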

TestHStoreFile.java

@@ -31,6 +31,7 @@ import java.util.Map;
 import java.util.OptionalLong;
 import java.util.TreeSet;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -39,7 +40,6 @@ import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
@@ -48,6 +48,8 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.HFileLink;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -65,6 +67,9 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
 import org.apache.hadoop.hbase.util.FSUtils;
+import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
+import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -74,10 +79,6 @@ import org.mockito.Mockito;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
-import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
 
 /**
  * Test HStoreFile
  */
@@ -212,6 +213,35 @@ public class TestHStoreFile extends HBaseTestCase {
       finalRow.length));
   }
 
+  @Test
+  public void testStoreFileReference() throws Exception {
+    final RegionInfo hri =
+      RegionInfoBuilder.newBuilder(TableName.valueOf("testStoreFileReference")).build();
+    HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs,
+      new Path(testDir, hri.getTable().getNameAsString()), hri);
+    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
+    // Make a store file and write data to it.
+    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
+      .withFilePath(regionFs.createTempName()).withFileContext(meta).build();
+    writeStoreFile(writer);
+    Path hsfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
+    writer.close();
+    HStoreFile file = new HStoreFile(this.fs, hsfPath, conf, cacheConf, BloomType.NONE, true);
+    file.initReader();
+    StoreFileReader r = file.getReader();
+    assertNotNull(r);
+    StoreFileScanner scanner =
+      new StoreFileScanner(r, mock(HFileScanner.class), false, false, 0, 0, false);
+    // Verify after instantiating scanner refCount is increased
+    assertTrue("Verify file is being referenced", file.isReferencedInReads());
+    scanner.close();
+    // Verify after closing scanner refCount is decreased
+    assertFalse("Verify file is not being referenced", file.isReferencedInReads());
+  }
+
   @Test
   public void testEmptyStoreFileRestrictKeyRanges() throws Exception {
     StoreFileReader reader = mock(StoreFileReader.class);