HBASE-5569 Do not collect deleted KVs when they are still in use by a scanner.
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1301135 13f79535-47bb-0310-9956-ffa450edef68
parent d73aee4f66
commit 5055d4cfff
@@ -308,7 +308,8 @@ public class ScanQueryMatcher {
       }
       // note the following next else if...
       // delete marker are not subject to other delete markers
-    } else if (!this.deletes.isEmpty()) {
+    } else if (!this.deletes.isEmpty()
+        && kv.getMemstoreTS() <= maxReadPointToTrackVersions) {
       DeleteResult deleteResult = deletes.isDeleted(bytes, offset, qualLength,
           timestamp);
       switch (deleteResult) {
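The guard added above stops the matcher from applying delete markers to a cell whose memstore timestamp is above maxReadPointToTrackVersions, i.e. a cell that an open scanner may still need, which is how the commit avoids collecting deleted KVs that are still in use. A minimal standalone sketch of that read-point check follows; the class and method names (ReadPointGuardSketch, mayApplyDeletes) and the literal values are hypothetical, and only the comparison itself comes from the hunk.

public final class ReadPointGuardSketch {

  /**
   * Mirrors the condition added in the hunk above: a cell is matched
   * against accumulated delete markers only if its memstore TS is at or
   * below the tracking read point; newer cells may still be in use by a
   * concurrent scanner and must survive.
   */
  static boolean mayApplyDeletes(long cellMemstoreTS, long maxReadPointToTrackVersions) {
    return cellMemstoreTS <= maxReadPointToTrackVersions;
  }

  public static void main(String[] args) {
    long trackingReadPoint = 10L; // assumed value, for illustration only
    System.out.println(mayApplyDeletes(8L, trackingReadPoint));  // true:  deletes apply, cell can be collected
    System.out.println(mayApplyDeletes(12L, trackingReadPoint)); // false: cell is kept for the scanner
  }
}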
@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
 import org.junit.experimental.categories.Category;
 
@@ -55,10 +54,7 @@ public class TestAtomicOperation extends HBaseTestCase {
 
   HRegion region = null;
   private HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-  private final String DIR = TEST_UTIL.getDataTestDir("TestIncrement").toString();
-
-
-  private final int MAX_VERSIONS = 2;
+  private final String DIR = TEST_UTIL.getDataTestDir("TestAtomicOperation").toString();
 
   // Test names
   static final byte[] tableName = Bytes.toBytes("testtable");;
@@ -258,10 +254,10 @@ public class TestAtomicOperation extends HBaseTestCase {
     LOG.info("Starting test testRowMutationMultiThreads");
     initHRegion(tableName, getName(), fam1);
 
-    // create 100 threads, each will alternate between adding and
+    // create 50 threads, each will alternate between adding and
     // removing a column
-    int numThreads = 100;
-    int opsPerThread = 1000;
+    int numThreads = 50;
+    int opsPerThread = 500;
     AtomicOperation[] all = new AtomicOperation[numThreads];
 
     AtomicLong timeStamps = new AtomicLong(0);
@@ -340,10 +336,10 @@ public class TestAtomicOperation extends HBaseTestCase {
     LOG.info("Starting test testMultiRowMutationMultiThreads");
     initHRegion(tableName, getName(), fam1);
 
-    // create 100 threads, each will alternate between adding and
+    // create 50 threads, each will alternate between adding and
     // removing a column
-    int numThreads = 100;
-    int opsPerThread = 1000;
+    int numThreads = 50;
+    int opsPerThread = 500;
     AtomicOperation[] all = new AtomicOperation[numThreads];
 
     AtomicLong timeStamps = new AtomicLong(0);