HBASE-21636 Enhance the shell scan command to support missing scanner specifications like ReadType, IsolationLevel etc.

Author: Nihal Jain, 2019-01-24 23:47:57 +05:30 (committed by stack)
parent ebfde02343
commit 353233c296
4 changed files with 53 additions and 8 deletions
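
Taken together, the new keys can be combined in a single scan from the shell. A representative example (the table name 't1' and the values are illustrative; the keys and accepted values come from the help text updated below):

  hbase> scan 't1', {READ_TYPE => 'PREAD', ISOLATION_LEVEL => 'READ_UNCOMMITTED', MAX_RESULT_SIZE => 123456}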

@@ -508,6 +508,13 @@ EOF
       # Normalize column names
       columns = [columns] if columns.class == String
       limit = args['LIMIT'] || -1
+      replica_id = args[REGION_REPLICA_ID]
+      isolation_level = args[ISOLATION_LEVEL]
+      read_type = args[READ_TYPE]
+      allow_partial_results = args[ALLOW_PARTIAL_RESULTS].nil? ? false : args[ALLOW_PARTIAL_RESULTS]
+      batch = args[BATCH] || -1
+      max_result_size = args[MAX_RESULT_SIZE] || -1
       unless columns.is_a?(Array)
         raise ArgumentError, 'COLUMNS must be specified as a String or an Array'
       end
@@ -549,10 +556,16 @@ EOF
       scan.setMaxVersions(versions) if versions > 1
       scan.setTimeRange(timerange[0], timerange[1]) if timerange
       scan.setRaw(raw)
-      scan.setCaching(limit) if limit > 0
+      scan.setLimit(limit) if limit > 0
       set_attributes(scan, attributes) if attributes
       set_authorizations(scan, authorizations) if authorizations
       scan.setConsistency(org.apache.hadoop.hbase.client.Consistency.valueOf(consistency)) if consistency
+      scan.setReplicaId(replica_id) if replica_id
+      scan.setIsolationLevel(org.apache.hadoop.hbase.client.IsolationLevel.valueOf(isolation_level)) if isolation_level
+      scan.setReadType(org.apache.hadoop.hbase.client::Scan::ReadType.valueOf(read_type)) if read_type
+      scan.setAllowPartialResults(allow_partial_results) if allow_partial_results
+      scan.setBatch(batch) if batch > 0
+      scan.setMaxResultSize(max_result_size) if max_result_size > 0
     else
       scan = org.apache.hadoop.hbase.client.Scan.new
     end
@@ -571,7 +584,6 @@ EOF
       raise(ArgumentError, 'Scan argument should be org.apache.hadoop.hbase.client.Scan') \
         unless scan.nil? || scan.is_a?(org.apache.hadoop.hbase.client.Scan)
-      limit = args['LIMIT'] || -1
       maxlength = args.delete('MAXLENGTH') || -1
       converter = args.delete(FORMATTER) || nil
       converter_class = args.delete(FORMATTER_CLASS) || 'org.apache.hadoop.hbase.util.Bytes'
@@ -605,13 +617,8 @@ EOF
             res[key][column] = cell
           end
         end
         # One more row processed
         count += 1
-        if limit > 0 && count >= limit
-          # If we reached the limit, exit before the next call to hasNext
-          break
-        end
       end
       scanner.close
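
A behavioural note on the hunks above: before this change, LIMIT only set a scanner caching hint (Scan#setCaching) and the shell enforced the row limit itself by breaking out of the result loop; afterwards, LIMIT maps directly onto Scan#setLimit and the manual break is removed. A minimal JRuby sketch of the new mapping (the limit of 10 is illustrative and assumes the HBase client classes are on the shell's classpath):

  scan = org.apache.hadoop.hbase.client.Scan.new
  # New behaviour: the row limit is carried on the Scan object itself
  scan.setLimit(10)
  # Old behaviour, shown for contrast: only a caching hint; the shell counted
  # rows and broke out of the loop once the limit was reached
  # scan.setCaching(10)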

@@ -65,6 +65,11 @@ module HBaseConstants
   NUMREGIONS = 'NUMREGIONS'.freeze
   REGION_REPLICATION = 'REGION_REPLICATION'.freeze
   REGION_REPLICA_ID = 'REGION_REPLICA_ID'.freeze
+  ISOLATION_LEVEL = 'ISOLATION_LEVEL'.freeze
+  READ_TYPE = 'READ_TYPE'.freeze
+  ALLOW_PARTIAL_RESULTS = 'ALLOW_PARTIAL_RESULTS'.freeze
+  BATCH = 'BATCH'.freeze
+  MAX_RESULT_SIZE = 'MAX_RESULT_SIZE'.freeze
   CONFIGURATION = org.apache.hadoop.hbase.HConstants::CONFIGURATION
   ATTRIBUTES = 'ATTRIBUTES'.freeze
   VISIBILITY = 'VISIBILITY'.freeze

@@ -25,7 +25,9 @@ module Shell
 Scan a table; pass table name and optionally a dictionary of scanner
 specifications. Scanner specifications may include one or more of:
 TIMERANGE, FILTER, LIMIT, STARTROW, STOPROW, ROWPREFIXFILTER, TIMESTAMP,
-MAXLENGTH or COLUMNS, CACHE or RAW, VERSIONS, ALL_METRICS or METRICS
+MAXLENGTH, COLUMNS, CACHE, RAW, VERSIONS, ALL_METRICS, METRICS,
+REGION_REPLICA_ID, ISOLATION_LEVEL, READ_TYPE, ALLOW_PARTIAL_RESULTS,
+BATCH or MAX_RESULT_SIZE

 If no columns are specified, all columns will be scanned.
 To scan all members of a column family, leave the qualifier empty as in
@@ -56,6 +58,8 @@ Some examples:
   hbase> scan 't1', {FILTER =>
     org.apache.hadoop.hbase.filter.ColumnPaginationFilter.new(1, 0)}
   hbase> scan 't1', {CONSISTENCY => 'TIMELINE'}
+  hbase> scan 't1', {ISOLATION_LEVEL => 'READ_UNCOMMITTED'}
+  hbase> scan 't1', {MAX_RESULT_SIZE => 123456}
 For setting the Operation Attributes
   hbase> scan 't1', { COLUMNS => ['c1', 'c2'], ATTRIBUTES => {'mykey' => 'myvalue'}}
   hbase> scan 't1', { COLUMNS => ['c1', 'c2'], AUTHORIZATIONS => ['PRIVATE','SECRET']}
@@ -72,6 +76,11 @@ Disabled by default. Example:
   hbase> scan 't1', {RAW => true, VERSIONS => 10}

+There is yet another option -- READ_TYPE -- which instructs the scanner to
+use a specific read type. Example:
+
+  hbase> scan 't1', {READ_TYPE => 'PREAD'}
+
 Besides the default 'toStringBinary' format, 'scan' supports custom formatting
 by column. A user can define a FORMATTER by adding it to the column name in
 the scan specification. The FORMATTER can be stipulated:
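
The updated help does not show ALLOW_PARTIAL_RESULTS or BATCH in an example; a hedged sketch of how they would be passed, using the same dictionary syntax (values are illustrative):

  hbase> scan 't1', {ALLOW_PARTIAL_RESULTS => true, BATCH => 100}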

@@ -602,6 +602,30 @@ module Hbase
       assert_nil(res['2']['x:b'])
     end

+    define_test 'scan should support REGION_REPLICA_ID' do
+      res = @test_table._scan_internal REGION_REPLICA_ID => 0
+      assert_not_nil(res)
+    end
+
+    define_test 'scan should support ISOLATION_LEVEL' do
+      res = @test_table._scan_internal ISOLATION_LEVEL => 'READ_COMMITTED'
+      assert_not_nil(res)
+    end
+
+    define_test 'scan should support READ_TYPE parameter' do
+      res = @test_table._scan_internal READ_TYPE => 'PREAD'
+      assert_not_nil(res)
+      res = @test_table._scan_internal READ_TYPE => 'STREAM'
+      assert_not_nil(res)
+      res = @test_table._scan_internal READ_TYPE => 'DEFAULT'
+      assert_not_nil(res)
+    end
+
+    define_test 'scan should support ALLOW_PARTIAL_RESULTS' do
+      res = @test_table._scan_internal ALLOW_PARTIAL_RESULTS => true
+      assert_not_nil(res)
+    end
+
     define_test "scan should work with raw and version parameter" do
       # Create test table if it does not exist
       @test_name_raw = "hbase_shell_tests_raw_scan"