diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 3ef64ec8977..9ee3d2f9a5f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -77,6 +77,7 @@ import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.OperationWithAttributes;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
@@ -1366,6 +1367,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler, AdminService.Blockin
     StringBuilder builder = new StringBuilder();
     builder.append("table: ").append(scanner.getRegionInfo().getTable().getNameAsString());
     builder.append(" region: ").append(scanner.getRegionInfo().getRegionNameAsString());
+    builder.append(" operation_id: ").append(scanner.getOperationId());
     return builder.toString();
   }
 
@@ -1378,6 +1380,12 @@ public class RSRpcServices implements HBaseRPCErrorHandler, AdminService.Blockin
       StringBuilder builder = new StringBuilder();
       builder.append("table: ").append(region.getRegionInfo().getTable().getNameAsString());
       builder.append(" region: ").append(region.getRegionInfo().getRegionNameAsString());
+      for (NameBytesPair pair : request.getScan().getAttributeList()) {
+        if (OperationWithAttributes.ID_ATRIBUTE.equals(pair.getName())) {
+          builder.append(" operation_id: ").append(Bytes.toString(pair.getValue().toByteArray()));
+          break;
+        }
+      }
       return builder.toString();
     } catch (IOException ignored) {
       return null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java
index 1860d8185e1..8d80dc2f8bc 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScanner.java
@@ -73,6 +73,14 @@ public interface RegionScanner extends InternalScanner {
    */
   int getBatch();
 
+  /**
+   * @return The Scanner's {@link org.apache.hadoop.hbase.client.Scan#ID_ATRIBUTE} value,
+   *         or null if not set.
+   */
+  default String getOperationId() {
+    return null;
+  }
+
   /**
    * Grab the next row's worth of values. This is a special internal method to be called from
    * coprocessor hooks to avoid expensive setup. Caller must set the thread's readpoint, start and
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScannerImpl.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScannerImpl.java
index 881f4234bf9..f73aea6e8c1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScannerImpl.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionScannerImpl.java
@@ -89,6 +89,7 @@ class RegionScannerImpl implements RegionScanner, Shipper, RpcCallback {
   private final long maxResultSize;
   private final ScannerContext defaultScannerContext;
   private final FilterWrapper filter;
+  private final String operationId;
 
   private RegionServerServices rsServices;
 
@@ -121,6 +122,7 @@ class RegionScannerImpl implements RegionScanner, Shipper, RpcCallback {
     defaultScannerContext = ScannerContext.newBuilder().setBatchLimit(scan.getBatch()).build();
     this.stopRow = scan.getStopRow();
     this.includeStopRow = scan.includeStopRow();
+    this.operationId = scan.getId();
 
     // synchronize on scannerReadPoints so that nobody calculates
     // getSmallestReadPoint, before scannerReadPoints is updated.
@@ -215,6 +217,11 @@ class RegionScannerImpl implements RegionScanner, Shipper, RpcCallback {
     return this.defaultScannerContext.getBatchLimit();
   }
 
+  @Override
+  public String getOperationId() {
+    return operationId;
+  }
+
   /**
    * Reset both the filter and the old filter.
    * @throws IOException in case a filter raises an I/O exception.
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index b4e0110f499..6498451381b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -4439,6 +4439,23 @@ public class TestHRegion {
     }
   }
 
+  @Test
+  public void testScannerOperationId() throws IOException {
+    region = initHRegion(tableName, method, CONF, COLUMN_FAMILY_BYTES);
+    Scan scan = new Scan();
+    RegionScanner scanner = region.getScanner(scan);
+    assertNull(scanner.getOperationId());
+    scanner.close();
+
+    String operationId = "test_operation_id_0101";
+    scan = new Scan().setId(operationId);
+    scanner = region.getScanner(scan);
+    assertEquals(operationId, scanner.getOperationId());
+    scanner.close();
+
+    HBaseTestingUtil.closeRegionAndWAL(this.region);
+  }
+
   /**
    * Write an HFile block full with Cells whose qualifier that are identical between
    * 0 and Short.MAX_VALUE. See HBASE-13329.
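For context, a minimal client-side sketch (not part of the patch) of how the new field is expected to be populated: the id set via Scan#setId travels with the request as the OperationWithAttributes.ID_ATRIBUTE attribute, which RegionScannerImpl now captures and RSRpcServices includes in its scanner detail strings. The table name, id string, and connection setup below are illustrative assumptions.

// Client-side sketch: tag a Scan with an id so the region server can surface it
// in the scanner details logged by RSRpcServices. Table name and id are examples.
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

public class TaggedScanExample {
  public static void main(String[] args) throws Exception {
    try (Connection connection = ConnectionFactory.createConnection();
        Table table = connection.getTable(TableName.valueOf("my_table"))) {
      // setId stores the value as the ID_ATRIBUTE operation attribute of the Scan,
      // which is what the RSRpcServices change above reads back for logging.
      Scan scan = new Scan().setId("nightly-report-scan");
      try (ResultScanner results = table.getScanner(scan)) {
        for (Result result : results) {
          // process each row ...
        }
      }
    }
  }
}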