From 75c717d4c28cc45b870549100db204615022a588 Mon Sep 17 00:00:00 2001
From: Jan Hentschel
Date: Wed, 22 Apr 2020 09:15:17 +0200
Subject: [PATCH] HBASE-23848 Removed deprecated setStopRow from Scan (#1184)

Signed-off-by: Duo Zhang
---
 .../hbase/backup/impl/BackupSystemTable.java  | 12 +++----
 .../hadoop/hbase/MetaTableAccessor.java       |  2 +-
 .../org/apache/hadoop/hbase/client/Scan.java  | 34 ++-----------------
 .../apache/hadoop/hbase/client/TestScan.java  |  8 ++---
 .../test/IntegrationTestBigLinkedList.java    |  5 +--
 .../hadoop/hbase/mapreduce/ExportUtils.java   |  2 +-
 .../hadoop/hbase/mapreduce/HashTable.java     |  2 +-
 .../mapreduce/MultiTableInputFormat.java      |  4 +--
 .../mapreduce/MultiTableInputFormatBase.java  |  2 +-
 .../hadoop/hbase/mapreduce/RowCounter.java    |  2 +-
 .../hadoop/hbase/mapreduce/SyncTable.java     |  6 ++--
 .../hbase/mapreduce/TableInputFormat.java     |  2 +-
 .../hbase/mapreduce/TableInputFormatBase.java |  2 +-
 .../replication/VerifyReplication.java        |  4 +--
 .../MultiTableInputFormatTestBase.java        |  2 +-
 .../hbase/mapreduce/TestTableInputFormat.java |  2 +-
 .../hadoop/hbase/rest/TableResource.java      |  2 +-
 .../hbase/client/TestFromClientSide5.java     |  6 ++--
 .../client/TestScannersFromClientSide.java    |  2 +-
 .../TestFilterListOrOperatorWithBlkCnt.java   |  2 +-
 ...TestCleanupCompactedFileOnRegionClose.java |  2 +-
 .../regionserver/TestSeekOptimizations.java   |  2 +-
 .../hbase/security/access/TestCellACLs.java   |  2 +-
 .../access/TestWithDisabledAuthorization.java |  2 +-
 .../thrift/ThriftHBaseServiceHandler.java     |  2 +-
 .../hadoop/hbase/thrift2/ThriftUtilities.java |  2 +-
 26 files changed, 44 insertions(+), 71 deletions(-)

diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
index 1f35d033403..3183ff4e8a0 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/impl/BackupSystemTable.java
@@ -1502,7 +1502,7 @@ public final class BackupSystemTable implements Closeable {
     byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
     stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
     scan.withStartRow(startRow);
-    scan.setStopRow(stopRow);
+    scan.withStopRow(stopRow);
     scan.addFamily(BackupSystemTable.SESSIONS_FAMILY);
     scan.readVersions(1);
     return scan;
@@ -1542,7 +1542,7 @@ public final class BackupSystemTable implements Closeable {
     byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
     stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
     scan.withStartRow(startRow);
-    scan.setStopRow(stopRow);
+    scan.withStopRow(stopRow);
     scan.addFamily(BackupSystemTable.META_FAMILY);
 
     return scan;
@@ -1583,7 +1583,7 @@ public final class BackupSystemTable implements Closeable {
     byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
     stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
     scan.withStartRow(startRow);
-    scan.setStopRow(stopRow);
+    scan.withStopRow(stopRow);
     scan.addFamily(BackupSystemTable.META_FAMILY);
     scan.readVersions(1);
 
@@ -1892,7 +1892,7 @@ public final class BackupSystemTable implements Closeable {
     byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
     stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
     scan.withStartRow(startRow);
-    scan.setStopRow(stopRow);
+    scan.withStopRow(stopRow);
     scan.addFamily(BackupSystemTable.META_FAMILY);
     scan.readVersions(1);
     return scan;
@@ -1940,7 +1940,7 @@ public final class BackupSystemTable implements Closeable {
     byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
     stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
     scan.withStartRow(startRow);
-    scan.setStopRow(stopRow);
+    scan.withStopRow(stopRow);
     scan.addFamily(BackupSystemTable.META_FAMILY);
     return scan;
   }
@@ -1967,7 +1967,7 @@ public final class BackupSystemTable implements Closeable {
     byte[] stopRow = Arrays.copyOf(startRow, startRow.length);
     stopRow[stopRow.length - 1] = (byte) (stopRow[stopRow.length - 1] + 1);
     scan.withStartRow(startRow);
-    scan.setStopRow(stopRow);
+    scan.withStopRow(stopRow);
     scan.addFamily(BackupSystemTable.META_FAMILY);
     return scan;
   }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index 6bc0e7f43f6..113f22c1d45 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -577,7 +577,7 @@ public class MetaTableAccessor {
     Scan scan = getMetaScan(connection, -1);
 
     scan.withStartRow(startKey);
-    scan.setStopRow(stopKey);
+    scan.withStopRow(stopKey);
     return scan;
   }
 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 36171dd1748..d515c550f0e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -399,34 +399,6 @@ public class Scan extends Query {
     return this;
   }
 
-  /**
-   * Set the stop row of the scan.
-   * <p>
-   * The scan will include rows that are lexicographically less than the provided stopRow.
-   * <p>
-   * <b>Note:</b> When doing a filter for a rowKey <u>Prefix</u> use
-   * {@link #setRowPrefixFilter(byte[])}. The 'trailing 0' will not yield the desired result.
-   * <p>
-   * @param stopRow row to end at (exclusive)
-   * @return this
-   * @throws IllegalArgumentException if stopRow does not meet criteria for a row key (when length
-   *           exceeds {@link HConstants#MAX_ROW_LENGTH})
-   * @deprecated since 2.0.0 and will be removed in 3.0.0. Use {@link #withStopRow(byte[])} instead.
-   *             This method may change the inclusive of the stop row to keep compatible with the old
-   *             behavior.
-   * @see #withStopRow(byte[])
-   * @see <a href="https://issues.apache.org/jira/browse/HBASE-17320">HBASE-17320</a>
-   */
-  @Deprecated
-  public Scan setStopRow(byte[] stopRow) {
-    withStopRow(stopRow);
-    if (ClientUtil.areScanStartRowAndStopRowEqual(this.startRow, this.stopRow)) {
-      // for keeping the old behavior that a scan with the same start and stop row is a get scan.
-      this.includeStopRow = true;
-    }
-    return this;
-  }
-
   /**
    * Set the stop row of the scan.
    * <p>
@@ -471,7 +443,7 @@ public class Scan extends Query {
    * <p>This is a utility method that converts the desired rowPrefix into the appropriate values
    * for the startRow and stopRow to achieve the desired result.</p>
    * <p>This can safely be used in combination with setFilter.</p>
-   * <p><b>NOTE: Doing a {@link #withStartRow(byte[])} and/or {@link #setStopRow(byte[])}
+   * <p><b>NOTE: Doing a {@link #withStartRow(byte[])} and/or {@link #withStopRow(byte[])}
    * after this method will yield undefined results.</b></p>
    * @param rowPrefix the prefix all rows must start with. (Set <i>null</i> to remove the filter.)
    * @return this
@@ -479,10 +451,10 @@ public class Scan extends Query {
   public Scan setRowPrefixFilter(byte[] rowPrefix) {
     if (rowPrefix == null) {
       withStartRow(HConstants.EMPTY_START_ROW);
-      setStopRow(HConstants.EMPTY_END_ROW);
+      withStopRow(HConstants.EMPTY_END_ROW);
     } else {
       this.withStartRow(rowPrefix);
-      this.setStopRow(ClientUtil.calculateTheClosestNextRowKeyForPrefix(rowPrefix));
+      this.withStopRow(ClientUtil.calculateTheClosestNextRowKeyForPrefix(rowPrefix));
     }
     return this;
   }
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
index cba11ae415d..b0f6b196b10 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
@@ -205,11 +205,11 @@ public class TestScan {
       fail("expected IllegalArgumentException to be thrown");
     }
 
-    scan.setStopRow(null);
-    scan.setStopRow(new byte[1]);
-    scan.setStopRow(new byte[HConstants.MAX_ROW_LENGTH]);
+    scan.withStopRow(null);
+    scan.withStopRow(new byte[1]);
+    scan.withStopRow(new byte[HConstants.MAX_ROW_LENGTH]);
     try {
-      scan.setStopRow(new byte[HConstants.MAX_ROW_LENGTH+1]);
+      scan.withStopRow(new byte[HConstants.MAX_ROW_LENGTH+1]);
       fail("should've thrown exception");
     } catch (IllegalArgumentException iae) {
     } catch (Exception e) {
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index 3cda64056bc..ce2314ebf76 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -1655,8 +1655,9 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
       if (cmd.hasOption("s"))
         scan.withStartRow(Bytes.toBytesBinary(cmd.getOptionValue("s")));
 
-      if (cmd.hasOption("e"))
-        scan.setStopRow(Bytes.toBytesBinary(cmd.getOptionValue("e")));
+      if (cmd.hasOption("e")) {
+        scan.withStopRow(Bytes.toBytesBinary(cmd.getOptionValue("e")));
+      }
 
       int limit = 0;
       if (cmd.hasOption("l"))
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
index 3683931a9f6..a4f092b7179 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/ExportUtils.java
@@ -124,7 +124,7 @@ public final class ExportUtils {
       s.withStartRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_START)));
     }
     if (conf.get(TableInputFormat.SCAN_ROW_STOP) != null) {
-      s.setStopRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_STOP)));
+      s.withStopRow(Bytes.toBytesBinary(conf.get(TableInputFormat.SCAN_ROW_STOP)));
     }
     // Set Scan Column Family
     boolean raw = Boolean.parseBoolean(conf.get(RAW_SCAN));
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
index 4f242642c29..ea09022f6cf 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HashTable.java
@@ -198,7 +198,7 @@ public class HashTable extends Configured implements Tool {
       scan.withStartRow(startRow);
     }
     if (!isTableEndRow(stopRow)) {
-      scan.setStopRow(stopRow);
+      scan.withStopRow(stopRow);
     }
     if(families != null) {
       for(String fam : families.split(",")) {
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java
index 456ea628382..b69b486ba27 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.java
@@ -39,13 +39,13 @@ import org.apache.hadoop.hbase.client.Scan;
  *
  * Scan scan1 = new Scan();
  * scan1.withStartRow(firstRow1);
- * scan1.setStopRow(lastRow1);
+ * scan1.withStopRow(lastRow1);
  * scan1.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, table1);
  * scans.add(scan1);
  *
  * Scan scan2 = new Scan();
  * scan2.withStartRow(firstRow2);
- * scan2.setStopRow(lastRow2);
+ * scan2.withStopRow(lastRow2);
  * scan1.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, table2);
  * scans.add(scan2);
  *
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 6146097a584..314b3a6310b 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -99,7 +99,7 @@ public abstract class MultiTableInputFormatBase extends
     try {
       Scan sc = tSplit.getScan();
       sc.withStartRow(tSplit.getStartRow());
-      sc.setStopRow(tSplit.getEndRow());
+      sc.withStopRow(tSplit.getEndRow());
       trr.setScan(sc);
       trr.setTable(table);
       return new RecordReader<ImmutableBytesWritable, Result>() {
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
index 3a014b8af4e..0c877515968 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/RowCounter.java
@@ -172,7 +172,7 @@ public class RowCounter extends AbstractHBaseTool {
     if (size == 1) {
       MultiRowRangeFilter.RowRange range = rowRangeList.get(0);
       scan.withStartRow(range.getStartRow()); //inclusive
-      scan.setStopRow(range.getStopRow()); //exclusive
+      scan.withStopRow(range.getStopRow()); //exclusive
     } else if (size > 1) {
       scan.setFilter(new MultiRowRangeFilter(rowRangeList));
     }
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
index 664b8cd5ae3..699ac82c7f4 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/SyncTable.java
@@ -341,7 +341,7 @@ public class SyncTable extends Configured implements Tool {
         ImmutableBytesWritable stopRow) throws IOException, InterruptedException {
       Scan scan = sourceTableHash.initScan();
       scan.withStartRow(startRow.copyBytes());
-      scan.setStopRow(stopRow.copyBytes());
+      scan.withStopRow(stopRow.copyBytes());
 
       ResultScanner sourceScanner = sourceTable.getScanner(scan);
       CellScanner sourceCells = new CellScanner(sourceScanner.iterator());
@@ -683,9 +683,9 @@ public class SyncTable extends Configured implements Tool {
       Scan scan = sourceTableHash.initScan();
       scan.withStartRow(splitEndRow);
       if (nextSourceKey == null) {
-        scan.setStopRow(sourceTableHash.stopRow);
+        scan.withStopRow(sourceTableHash.stopRow);
       } else {
-        scan.setStopRow(nextSourceKey.copyBytes());
+        scan.withStopRow(nextSourceKey.copyBytes());
       }
 
       ResultScanner targetScanner = null;
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
index 29bc0ddf23a..c2351b91fe6 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
@@ -152,7 +152,7 @@ implements Configurable {
     }
 
     if (conf.get(SCAN_ROW_STOP) != null) {
-      scan.setStopRow(Bytes.toBytesBinary(conf.get(SCAN_ROW_STOP)));
+      scan.withStopRow(Bytes.toBytesBinary(conf.get(SCAN_ROW_STOP)));
     }
 
     if (conf.get(SCAN_COLUMNS) != null) {
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
index dce2d2394f1..e7c5bf4fb2d 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormatBase.java
@@ -182,7 +182,7 @@ public abstract class TableInputFormatBase
       this.tableRecordReader != null ? this.tableRecordReader : new TableRecordReader();
     Scan sc = new Scan(this.scan);
     sc.withStartRow(tSplit.getStartRow());
-    sc.setStopRow(tSplit.getEndRow());
+    sc.withStopRow(tSplit.getEndRow());
     trr.setScan(sc);
     trr.setTable(getTable());
     return new RecordReader<ImmutableBytesWritable, Result>() {
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
index 1edf6ed2573..389aa83b22d 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/replication/VerifyReplication.java
@@ -208,7 +208,7 @@ public class VerifyReplication extends Configured implements Tool {
           endRow = ((TableSplit) tableSplit).getEndRow();
         }
 
-        scan.setStopRow(endRow);
+        scan.withStopRow(endRow);
 
         String peerSnapshotName = conf.get(NAME + ".peerSnapshotName", null);
         if (peerSnapshotName != null) {
@@ -514,7 +514,7 @@ public class VerifyReplication extends Configured implements Tool {
     scan.withStartRow(startPrefixRow);
     byte[] stopRow = Bytes.add(Bytes.head(lastPrefixRow, lastPrefixRow.length - 1),
         new byte[]{(byte) (lastPrefixRow[lastPrefixRow.length - 1] + 1)});
-    scan.setStopRow(stopRow);
+    scan.withStopRow(stopRow);
   }
 
   @VisibleForTesting
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
index ad22d0b6403..e9a385cd1cd 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
@@ -241,7 +241,7 @@ public abstract class MultiTableInputFormatTestBase {
         scan.withStartRow(Bytes.toBytes(start));
       }
       if (stop != null) {
-        scan.setStopRow(Bytes.toBytes(stop));
+        scan.withStopRow(Bytes.toBytes(stop));
       }
 
       scans.add(scan);
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
index f3c14aa3e5d..8cf90015651 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
@@ -170,7 +170,7 @@ public class TestTableInputFormat {
         new org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl();
     Scan s = new Scan();
     s.withStartRow(Bytes.toBytes("aaa"));
-    s.setStopRow(Bytes.toBytes("zzz"));
+    s.withStopRow(Bytes.toBytes("zzz"));
     s.addFamily(FAMILY);
     trr.setScan(s);
    trr.setHTable(table);
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
index 66487588edd..d73e4f48f4f 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/TableResource.java
@@ -156,7 +156,7 @@ public class TableResource extends ResourceBase {
       if (!startRow.isEmpty()) {
         tableScan.withStartRow(Bytes.toBytes(startRow));
       }
-      tableScan.setStopRow(Bytes.toBytes(endRow));
+      tableScan.withStopRow(Bytes.toBytes(endRow));
       for (String col : column) {
         byte [][] parts = CellUtil.parseColumn(Bytes.toBytes(col.trim()));
         if (parts.length == 1) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java
index 38ddf2ffb37..b034cc3d43e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide5.java
@@ -1320,7 +1320,7 @@ public class TestFromClientSide5 extends FromClientSideBase {
 
       Scan scan = new Scan();
       scan.withStartRow(Bytes.toBytes(1));
-      scan.setStopRow(Bytes.toBytes(3));
+      scan.withStopRow(Bytes.toBytes(3));
       scan.addColumn(FAMILY, FAMILY);
       scan.setFilter(new RowFilter(CompareOperator.NOT_EQUAL,
               new BinaryComparator(Bytes.toBytes(1))));
@@ -2139,7 +2139,7 @@ public class TestFromClientSide5 extends FromClientSideBase {
       scan.setSmall(small);
       scan.setReversed(true);
       scan.withStartRow(Bytes.toBytes("002"));
-      scan.setStopRow(Bytes.toBytes("000"));
+      scan.withStopRow(Bytes.toBytes("000"));
       try (ResultScanner scanner = table.getScanner(scan)) {
         int count = 0;
         byte[] lastRow = null;
@@ -2203,7 +2203,7 @@ public class TestFromClientSide5 extends FromClientSideBase {
       scan.setSmall(small);
       scan.setReversed(true);
       scan.withStartRow(Bytes.toBytes("006"));
-      scan.setStopRow(Bytes.toBytes("002"));
+      scan.withStopRow(Bytes.toBytes("002"));
       try (ResultScanner scanner = table.getScanner(scan)) {
         int count = 0;
         byte[] lastRow = null;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
index f91a228f76f..c045c606b78 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestScannersFromClientSide.java
@@ -908,7 +908,7 @@ public class TestScannersFromClientSide {
     scan.addFamily(Bytes.toBytes("c"));
     scan.setAttribute(Scan.SCAN_ATTRIBUTES_TABLE_NAME, tableName.getName());
     scan.setMaxResultSize(10001);
-    scan.setStopRow(Bytes.toBytes("bbbb"));
+    scan.withStopRow(Bytes.toBytes("bbbb"));
     scan.setFilter(new LimitKVsReturnFilter());
     ResultScanner rs = table.getScanner(scan);
     Result result;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
index 24ac7945e45..72edac66dc9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java
@@ -159,7 +159,7 @@ public class TestFilterListOrOperatorWithBlkCnt {
       scan.withStartRow(startRow);
     }
     if(!Bytes.toString(stopRow).isEmpty()) {
-      scan.setStopRow(stopRow);
+      scan.withStopRow(stopRow);
     }
     ResultScanner scanner = ht.getScanner(scan);
     List<Cell> kvList = new ArrayList<>();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCleanupCompactedFileOnRegionClose.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCleanupCompactedFileOnRegionClose.java
index 0bf4e78c8a7..ac44fe0e4d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCleanupCompactedFileOnRegionClose.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCleanupCompactedFileOnRegionClose.java
@@ -101,7 +101,7 @@ public class TestCleanupCompactedFileOnRegionClose {
 
     //Create a scanner and keep it open to add references to StoreFileReaders
     Scan scan = new Scan();
-    scan.setStopRow(Bytes.toBytes(refSFCount-2));
+    scan.withStopRow(Bytes.toBytes(refSFCount-2));
     scan.setCaching(1);
     ResultScanner scanner = table.getScanner(scan);
     Result res = scanner.next();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
index 7eb7fcf963f..2b408a72456 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSeekOptimizations.java
@@ -221,7 +221,7 @@ public class TestSeekOptimizations {
     {
       final byte[] scannerStopRow =
          rowBytes(endRow + (startRow != endRow ? 1 : 0));
-      scan.setStopRow(scannerStopRow);
+      scan.withStopRow(scannerStopRow);
     }
 
     final long initialSeekCount = StoreFileScanner.getSeekCount();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
index d0445c5f1e1..fe85f768dba 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCellACLs.java
@@ -242,7 +242,7 @@ public class TestCellACLs extends SecureTestUtil {
       public List<Cell> run() throws Exception {
         Scan scan = new Scan();
         scan.withStartRow(TEST_ROW);
-        scan.setStopRow(Bytes.add(TEST_ROW, new byte[]{ 0 } ));
+        scan.withStopRow(Bytes.add(TEST_ROW, new byte[]{ 0 }));
         scan.addFamily(TEST_FAMILY);
         Connection connection = ConnectionFactory.createConnection(conf);
         Table t = connection.getTable(testTable.getTableName());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
index 74fa3e8e8fe..a112f7e32d6 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestWithDisabledAuthorization.java
@@ -974,7 +974,7 @@ public class TestWithDisabledAuthorization extends SecureTestUtil {
       public List<Cell> run() throws Exception {
         Scan scan = new Scan();
         scan.withStartRow(TEST_ROW);
-        scan.setStopRow(Bytes.add(TEST_ROW, new byte[]{ 0 } ));
+        scan.withStopRow(Bytes.add(TEST_ROW, new byte[]{ 0 }));
         scan.addFamily(TEST_FAMILY);
         Connection connection = ConnectionFactory.createConnection(conf);
         Table t = connection.getTable(testTable.getTableName());
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
index 4682d6d6f9f..32706a8ee8c 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
@@ -886,7 +886,7 @@ public class ThriftHBaseServiceHandler extends HBaseServiceHandler implements Hbase.Iface {
       scan.withStartRow(tScan.getStartRow());
     }
     if (tScan.isSetStopRow()) {
-      scan.setStopRow(tScan.getStopRow());
+      scan.withStopRow(tScan.getStopRow());
     }
     if (tScan.isSetTimestamp()) {
       scan.setTimeRange(0, tScan.getTimestamp());
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
index 37ac96bb217..3393e3c4d5a 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
@@ -525,7 +525,7 @@ public final class ThriftUtilities {
       out.withStartRow(in.getStartRow());
     }
     if (in.isSetStopRow()) {
-      out.setStopRow(in.getStopRow());
+      out.withStopRow(in.getStopRow());
     }
     if (in.isSetCaching()) {
      out.setCaching(in.getCaching());
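
For readers migrating their own code, a minimal sketch of the call-site change this patch applies everywhere (the row keys below are made up for illustration). Both methods treat the stop row as exclusive, but the removed setStopRow silently switched to inclusive when the start and stop rows were equal, to preserve the old "scan with start == stop behaves like a Get" semantics; withStopRow(byte[]) does not, so callers that relied on that quirk can make the intent explicit with the two-argument withStopRow(byte[], boolean) overload.

    Scan scan = new Scan();
    scan.withStartRow(Bytes.toBytes("row-0000"));
    // Before (deprecated, removed by this patch):
    //   scan.setStopRow(Bytes.toBytes("row-0100"));
    // After: the stop row is exclusive in all cases.
    scan.withStopRow(Bytes.toBytes("row-0100"));
    // Old get-like behavior for start == stop, stated explicitly:
    //   scan.withStopRow(Bytes.toBytes("row-0000"), true);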