diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/UserScanQueryMatcher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/UserScanQueryMatcher.java
index e88e3a0ce1e..9d67b2cfc6b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/UserScanQueryMatcher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/UserScanQueryMatcher.java
@@ -156,11 +156,13 @@ public abstract class UserScanQueryMatcher extends ScanQueryMatcher {
       filter.filterCell(cell));
   }
 
-  /*
-   * Call this when scan has filter. Decide the desired behavior by checkVersions's MatchCode
-   * and filterCell's ReturnCode. Cell may be skipped by filter, so the column versions
-   * in result may be less than user need. It will check versions again after filter.
+  /**
+   * Call this when the scan has a filter. Decide the desired behavior by checkVersions's MatchCode
+   * and filterCell's ReturnCode. Cells may be skipped by the filter, so the column versions in the
+   * result may be fewer than the user needs. It needs to check versions again when the filter and
+   * the columnTracker both include the cell.
+   *
+   * <pre>
    * ColumnChecker                FilterResponse               Desired behavior
    * INCLUDE                      SKIP                         SKIP
    * INCLUDE                      NEXT_COL                     SEEK_NEXT_COL or SEEK_NEXT_ROW
@@ -183,6 +185,7 @@ public abstract class UserScanQueryMatcher extends ScanQueryMatcher {
    * INCLUDE_AND_SEEK_NEXT_ROW    INCLUDE                      INCLUDE_AND_SEEK_NEXT_ROW
    * INCLUDE_AND_SEEK_NEXT_ROW    INCLUDE_AND_NEXT_COL         INCLUDE_AND_SEEK_NEXT_ROW
    * INCLUDE_AND_SEEK_NEXT_ROW    INCLUDE_AND_SEEK_NEXT_ROW    INCLUDE_AND_SEEK_NEXT_ROW
+   * </pre>
    */
   private final MatchCode mergeFilterResponse(Cell cell, MatchCode matchCode,
       ReturnCode filterResponse) {
@@ -212,12 +215,10 @@ public abstract class UserScanQueryMatcher extends ScanQueryMatcher {
       case INCLUDE_AND_NEXT_COL:
         if (matchCode == MatchCode.INCLUDE) {
           matchCode = MatchCode.INCLUDE_AND_SEEK_NEXT_COL;
-          // Update column tracker to next column, As we use the column hint from the tracker to seek
-          // to next cell
-          columns.doneWithColumn(cell);
         }
         break;
       case INCLUDE_AND_SEEK_NEXT_ROW:
+        matchCode = MatchCode.INCLUDE_AND_SEEK_NEXT_ROW;
         break;
       default:
         throw new RuntimeException("UNEXPECTED");
@@ -227,18 +228,24 @@ public abstract class UserScanQueryMatcher extends ScanQueryMatcher {
     assert matchCode == MatchCode.INCLUDE || matchCode == MatchCode.INCLUDE_AND_SEEK_NEXT_COL
         || matchCode == MatchCode.INCLUDE_AND_SEEK_NEXT_ROW;
 
-    if (matchCode == MatchCode.INCLUDE_AND_SEEK_NEXT_COL
-        || matchCode == MatchCode.INCLUDE_AND_SEEK_NEXT_ROW) {
-      return matchCode;
-    }
-
-    // Now we will check versions again.
+    // We need to make sure that the number of cells returned will not exceed the max versions of
+    // the scan when the match code is an INCLUDE* case.
     if (curColCell == null || !CellUtil.matchingRowColumn(cell, curColCell)) {
       count = 0;
       curColCell = cell;
     }
     count += 1;
-    return count > versionsAfterFilter ? MatchCode.SEEK_NEXT_COL : MatchCode.INCLUDE;
+
+    if (count > versionsAfterFilter) {
+      return MatchCode.SEEK_NEXT_COL;
+    } else {
+      if (matchCode == MatchCode.INCLUDE_AND_SEEK_NEXT_COL) {
+        // Update the column tracker to the next column, as we use the column hint from the
+        // tracker to seek to the next cell.
+        columns.doneWithColumn(cell);
+      }
+      return matchCode;
+    }
   }
 
   protected abstract boolean isGet();
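To make the decision table in the javadoc above concrete, here is a minimal, self-contained sketch of the merge-then-recheck idea. The enums, class name and string column key are simplified stand-ins invented for illustration, not the real HBase MatchCode, ReturnCode or ColumnTracker types; the point is only that the per-column version counter now applies to every INCLUDE* outcome rather than to plain INCLUDE alone.

// Simplified model (assumed names, not the HBase API) of mergeFilterResponse's decision flow.
public class MergeSketch {

  enum ColumnCode { INCLUDE, INCLUDE_AND_SEEK_NEXT_COL, INCLUDE_AND_SEEK_NEXT_ROW }

  enum FilterCode { SKIP, NEXT_COL, INCLUDE, INCLUDE_AND_NEXT_COL, INCLUDE_AND_SEEK_NEXT_ROW }

  enum Merged {
    SKIP, SEEK_NEXT_COL, SEEK_NEXT_ROW, INCLUDE, INCLUDE_AND_SEEK_NEXT_COL, INCLUDE_AND_SEEK_NEXT_ROW
  }

  private final int versionsAfterFilter;
  private String curColumn; // row/column whose versions are currently being counted
  private int count;        // versions already included for that column

  MergeSketch(int versionsAfterFilter) {
    this.versionsAfterFilter = versionsAfterFilter;
  }

  Merged merge(String column, ColumnCode columnCode, FilterCode filterCode) {
    // 1. The filter rejects the cell: no version counting, just skip or seek, keeping the
    //    seek hint the column checker already produced.
    if (filterCode == FilterCode.SKIP || filterCode == FilterCode.NEXT_COL) {
      switch (columnCode) {
        case INCLUDE:
          return filterCode == FilterCode.SKIP ? Merged.SKIP : Merged.SEEK_NEXT_COL;
        case INCLUDE_AND_SEEK_NEXT_COL:
          return Merged.SEEK_NEXT_COL;
        default:
          return Merged.SEEK_NEXT_ROW;
      }
    }

    // 2. Both sides include the cell: keep the strongest seek hint of the two.
    Merged merged;
    if (columnCode == ColumnCode.INCLUDE_AND_SEEK_NEXT_ROW
        || filterCode == FilterCode.INCLUDE_AND_SEEK_NEXT_ROW) {
      merged = Merged.INCLUDE_AND_SEEK_NEXT_ROW;
    } else if (columnCode == ColumnCode.INCLUDE_AND_SEEK_NEXT_COL
        || filterCode == FilterCode.INCLUDE_AND_NEXT_COL) {
      merged = Merged.INCLUDE_AND_SEEK_NEXT_COL;
    } else {
      merged = Merged.INCLUDE;
    }

    // 3. Re-check versions after the filter: reset the counter on a new column and cap the
    //    number of included versions for every INCLUDE* code, which is what the patch changes.
    if (!column.equals(curColumn)) {
      curColumn = column;
      count = 0;
    }
    count++;
    return count > versionsAfterFilter ? Merged.SEEK_NEXT_COL : merged;
  }

  public static void main(String[] args) {
    MergeSketch sketch = new MergeSketch(3);
    // Four versions of the same column all pass the filter; only three are included.
    for (int i = 0; i < 4; i++) {
      System.out.println(sketch.merge("row1/cf:q", ColumnCode.INCLUDE, FilterCode.INCLUDE));
    }
    // Prints INCLUDE, INCLUDE, INCLUDE, SEEK_NEXT_COL.
  }
}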
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
index f0060e4d888..5441e2b4f03 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
@@ -31,6 +31,7 @@ import java.util.stream.IntStream;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -40,8 +41,10 @@ import org.apache.hadoop.hbase.HTestConst;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
 import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
+import org.apache.hadoop.hbase.filter.QualifierFilter;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -816,4 +819,26 @@ public class TestScannersFromClientSide {
       }
     }
   }
+
+  @Test
+  public void testScanWithColumnsAndFilterAndVersion() throws IOException {
+    TableName tableName = TableName.valueOf(name.getMethodName());
+    try (Table table = TEST_UTIL.createTable(tableName, FAMILY, 4)) {
+      for (int i = 0; i < 4; i++) {
+        Put put = new Put(ROW);
+        put.addColumn(FAMILY, QUALIFIER, VALUE);
+        table.put(put);
+      }
+
+      Scan scan = new Scan();
+      scan.addColumn(FAMILY, QUALIFIER);
+      scan.setFilter(new QualifierFilter(CompareOperator.EQUAL, new BinaryComparator(QUALIFIER)));
+      scan.readVersions(3);
+
+      try (ResultScanner scanner = table.getScanner(scan)) {
+        Result result = scanner.next();
+        assertEquals(3, result.size());
+      }
+    }
+  }
 }
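Outside the test harness, the client-side combination the new test exercises looks roughly like the sketch below. This is only an illustration: the configuration, the table name "demo", the family "cf" and the qualifier "q" are assumed placeholders, and the table is expected to have been created with at least four versions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanWithColumnFilterAndVersions {

  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    byte[] family = Bytes.toBytes("cf");
    byte[] qualifier = Bytes.toBytes("q");

    try (Connection connection = ConnectionFactory.createConnection(conf);
        Table table = connection.getTable(TableName.valueOf("demo"))) {
      Scan scan = new Scan();
      // Explicit column, a filter that still includes the column, and a bounded number of
      // versions: the same combination covered by testScanWithColumnsAndFilterAndVersion.
      scan.addColumn(family, qualifier);
      scan.setFilter(new QualifierFilter(CompareOperator.EQUAL, new BinaryComparator(qualifier)));
      scan.readVersions(3);

      try (ResultScanner scanner = table.getScanner(scan)) {
        for (Result result : scanner) {
          System.out.println("cells returned for one row: " + result.size());
        }
      }
    }
  }
}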
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
index aec93cbb3a3..b1b8b25701b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/querymatcher/TestUserScanQueryMatcher.java
@@ -29,6 +29,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.KeepDeletedCells;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.FilterBase;
 import org.apache.hadoop.hbase.regionserver.ScanInfo;
 import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher.MatchCode;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
@@ -206,15 +208,17 @@ public class TestUserScanQueryMatcher extends AbstractTestScanQueryMatcher {
   public void testMatch_ExpiredWildcard() throws IOException {
 
     long testTTL = 1000;
-    MatchCode[] expected = new MatchCode[] { ScanQueryMatcher.MatchCode.INCLUDE,
-      ScanQueryMatcher.MatchCode.INCLUDE, ScanQueryMatcher.MatchCode.SEEK_NEXT_COL,
-      ScanQueryMatcher.MatchCode.INCLUDE, ScanQueryMatcher.MatchCode.SEEK_NEXT_COL,
-      ScanQueryMatcher.MatchCode.DONE };
+    MatchCode[] expected =
+        new MatchCode[] { ScanQueryMatcher.MatchCode.INCLUDE, ScanQueryMatcher.MatchCode.INCLUDE,
+          ScanQueryMatcher.MatchCode.SEEK_NEXT_COL, ScanQueryMatcher.MatchCode.INCLUDE,
+          ScanQueryMatcher.MatchCode.SEEK_NEXT_COL, ScanQueryMatcher.MatchCode.DONE };
 
     long now = EnvironmentEdgeManager.currentTime();
-    UserScanQueryMatcher qm = UserScanQueryMatcher.create(scan, new ScanInfo(this.conf, fam2, 0, 1,
-      testTTL, KeepDeletedCells.FALSE, HConstants.DEFAULT_BLOCKSIZE, 0, rowComparator, false),
-      null, now - testTTL, now, null);
+    UserScanQueryMatcher qm =
+        UserScanQueryMatcher.create(scan,
+          new ScanInfo(this.conf, fam2, 0, 1, testTTL, KeepDeletedCells.FALSE,
+              HConstants.DEFAULT_BLOCKSIZE, 0, rowComparator, false),
+          null, now - testTTL, now, null);
 
     KeyValue[] kvs = new KeyValue[] { new KeyValue(row1, fam2, col1, now - 100, data),
       new KeyValue(row1, fam2, col2, now - 50, data),
@@ -236,4 +240,49 @@ public class TestUserScanQueryMatcher extends AbstractTestScanQueryMatcher {
       assertEquals(expected[i], actual.get(i));
     }
   }
+
+  class TestFilter extends FilterBase {
+
+    @Override
+    public ReturnCode filterKeyValue(final Cell c) throws IOException {
+      return ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW;
+    }
+  }
+
+  @Test
+  public void testMatchWhenFilterReturnsIncludeAndSeekNextRow() throws IOException {
+    List<MatchCode> expected = new ArrayList<>();
+    expected.add(ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_ROW);
+    expected.add(ScanQueryMatcher.MatchCode.DONE);
+
+    Scan scanWithFilter = new Scan(scan).setFilter(new TestFilter());
+
+    long now = EnvironmentEdgeManager.currentTime();
+
+    // scan with columns 2, 4 and 5
+    UserScanQueryMatcher qm = UserScanQueryMatcher.create(
+      scanWithFilter, new ScanInfo(this.conf, fam2, 0, 1, ttl, KeepDeletedCells.FALSE,
+          HConstants.DEFAULT_BLOCKSIZE, 0, rowComparator, false),
+      get.getFamilyMap().get(fam2), now - ttl, now, null);
+
+    List<KeyValue> memstore = new ArrayList<>();
+    // ColumnTracker will return INCLUDE_AND_SEEK_NEXT_COL, and the filter will return
+    // INCLUDE_AND_SEEK_NEXT_ROW, so the final match code will be INCLUDE_AND_SEEK_NEXT_ROW.
+    memstore.add(new KeyValue(row1, fam2, col2, 1, data));
+    memstore.add(new KeyValue(row2, fam1, col1, data));
+
+    List<MatchCode> actual = new ArrayList<>(memstore.size());
+    KeyValue k = memstore.get(0);
+    qm.setToNewRow(k);
+
+    for (KeyValue kv : memstore) {
+      actual.add(qm.match(kv));
+    }
+
+    assertEquals(expected.size(), actual.size());
+    for (int i = 0; i < expected.size(); i++) {
+      LOG.debug("expected " + expected.get(i) + ", actual " + actual.get(i));
+      assertEquals(expected.get(i), actual.get(i));
+    }
+  }
 }
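The TestFilter above overrides the older filterKeyValue hook. A standalone filter with the same behavior can also be written against filterCell, which replaced filterKeyValue in the HBase 2.x Filter API. The sketch below uses an invented class name, and a filter shipped from a real client would additionally need serialization support (toByteArray/parseFrom), which the matcher-level test avoids by constructing the matcher directly.

import java.io.IOException;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FilterBase;

// Always include the current cell, then ask the scanner to move on to the next row,
// mirroring the TestFilter added in the diff above.
public class IncludeAndSeekNextRowFilter extends FilterBase {

  @Override
  public ReturnCode filterCell(final Cell c) throws IOException {
    return ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW;
  }

  // Attaching it to a scan mirrors the "new Scan(scan).setFilter(...)" line in the test.
  public static Scan withFilter(Scan base) throws IOException {
    return new Scan(base).setFilter(new IncludeAndSeekNextRowFilter());
  }
}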