HBASE-2095 TIF should support more confs for the scanner

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@896935 13f79535-47bb-0310-9956-ffa450edef68
Andrew Kyle Purtell 2010-01-07 17:34:15 +00:00
parent 73aa65deb3
commit a9482a2382
2 changed files with 63 additions and 9 deletions

CHANGES.txt

@@ -277,9 +277,10 @@ Release 0.21.0 - Unreleased
    HBASE-2080 [EC2] Support multivolume local instance storage
    HBASE-2083 [EC2] HDFS DataNode no longer required on master
    HBASE-2084 [EC2] JAVA_HOME handling broken
-   HBASE-2036 Use Configuration instead of HBaseConfiguration (Enis Soztutar via Stack)
-   HBASE-2085 StringBuffer -> StringBuilder - conversion of references as necessary
-              (Kay Kay via Stack)
+   HBASE-2036 Use Configuration instead of HBaseConfiguration (Enis Soztutar
+              via Stack)
+   HBASE-2085 StringBuffer -> StringBuilder - conversion of references as
+              necessary (Kay Kay via Stack)
    HBASE-2052 Upper bound of outstanding WALs can be overrun
    HBASE-2086 Job(configuration,String) deprecated (Kay Kay via Stack)
    HBASE-1996 Configure scanner buffer in bytes instead of number of rows
@@ -287,6 +288,8 @@ Release 0.21.0 - Unreleased
    HBASE-2090 findbugs issues (Kay Kay via Stack)
    HBASE-2089 HBaseConfiguration() ctor. deprecated (Kay Kay via Stack)
    HBASE-2035 Binary values are formatted wrong in shell
+   HBASE-2095 TIF should support more confs for the scanner (Bassam Tabbara
+              via Andrew Purtell)

 NEW FEATURES
    HBASE-1901 "General" partitioner for "hbase-48" bulk (behind the api, write

src/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java

@@ -38,10 +38,24 @@ implements Configurable {

   private final Log LOG = LogFactory.getLog(TableInputFormat.class);

-  /** Job parameter that specifies the output table. */
+  /** Job parameter that specifies the input table. */
   public static final String INPUT_TABLE = "hbase.mapreduce.inputtable";
-  /** Space delimited list of columns. */
+  /** Base-64 encoded scanner. All other SCAN_ confs are ignored if this is specified. */
   public static final String SCAN = "hbase.mapreduce.scan";
+  /** Space delimited list of columns to scan. */
+  public static final String SCAN_COLUMNS = "hbase.mapreduce.scan.columns";
+  /** The timestamp used to filter columns with a specific timestamp. */
+  public static final String SCAN_TIMESTAMP = "hbase.mapreduce.scan.timestamp";
+  /** The starting timestamp used to filter columns with a specific range of versions. */
+  public static final String SCAN_TIMERANGE_START = "hbase.mapreduce.scan.timerange.start";
+  /** The ending timestamp used to filter columns with a specific range of versions. */
+  public static final String SCAN_TIMERANGE_END = "hbase.mapreduce.scan.timerange.end";
+  /** The maximum number of versions to return. */
+  public static final String SCAN_MAXVERSIONS = "hbase.mapreduce.scan.maxversions";
+  /** Set to false to disable server-side caching of blocks for this scan. */
+  public static final String SCAN_CACHEBLOCKS = "hbase.mapreduce.scan.cacheblocks";
+  /** The number of rows for caching that will be passed to scanners. */
+  public static final String SCAN_CACHEDROWS = "hbase.mapreduce.scan.cachedrows";

   /** The configuration. */
   private Configuration conf = null;
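For illustration only, not part of this commit: a minimal driver sketch showing how a job might set the new scan properties, assuming the trunk Job API of this era; the class, table, and column names below are hypothetical.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.mapreduce.Job;

// Hypothetical driver: configures the scanner through the new SCAN_* keys
// instead of a Base-64 serialized Scan.
public class ScanConfDriver {
  public static Job createJob() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.set(TableInputFormat.INPUT_TABLE, "mytable");             // hypothetical table
    conf.set(TableInputFormat.SCAN_COLUMNS, "info:name info:age"); // space delimited
    conf.setInt(TableInputFormat.SCAN_MAXVERSIONS, 1);             // parsed by setConf
    conf.setBoolean(TableInputFormat.SCAN_CACHEBLOCKS, false);
    conf.setInt(TableInputFormat.SCAN_CACHEDROWS, 500);
    Job job = new Job(conf, "scan-mytable");
    job.setInputFormatClass(TableInputFormat.class);
    return job;
  }
}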
@@ -74,12 +88,49 @@ implements Configurable {
     } catch (Exception e) {
       LOG.error(StringUtils.stringifyException(e));
     }

     Scan scan = null;
+    if (conf.get(SCAN) != null) {
       try {
         scan = TableMapReduceUtil.convertStringToScan(conf.get(SCAN));
       } catch (IOException e) {
         LOG.error("An error occurred.", e);
       }
+    } else {
+      try {
+        scan = new Scan();
+        if (conf.get(SCAN_COLUMNS) != null) {
+          scan.addColumns(conf.get(SCAN_COLUMNS));
+        }
+        if (conf.get(SCAN_TIMESTAMP) != null) {
+          scan.setTimeStamp(Long.parseLong(conf.get(SCAN_TIMESTAMP)));
+        }
+        if (conf.get(SCAN_TIMERANGE_START) != null && conf.get(SCAN_TIMERANGE_END) != null) {
+          scan.setTimeRange(
+              Long.parseLong(conf.get(SCAN_TIMERANGE_START)),
+              Long.parseLong(conf.get(SCAN_TIMERANGE_END)));
+        }
+        if (conf.get(SCAN_MAXVERSIONS) != null) {
+          scan.setMaxVersions(Integer.parseInt(conf.get(SCAN_MAXVERSIONS)));
+        }
+        if (conf.get(SCAN_CACHEBLOCKS) != null) {
+          scan.setCacheBlocks(Boolean.parseBoolean(conf.get(SCAN_CACHEBLOCKS)));
+        }
+        if (conf.get(SCAN_CACHEDROWS) != null) {
+          scan.setCaching(Integer.parseInt(conf.get(SCAN_CACHEDROWS)));
+        }
+      } catch (Exception e) {
+        LOG.error(StringUtils.stringifyException(e));
+      }
+    }
     setScan(scan);
   }
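Also for illustration, not in the diff: because a serialized scanner in hbase.mapreduce.scan takes precedence over all SCAN_* keys, callers who need options beyond those keys can still pass a whole Scan; convertScanToString is assumed here as the existing TableMapReduceUtil counterpart of the convertStringToScan call above.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;

// Hypothetical helper: serializes a fully-built Scan into the SCAN property,
// which makes setConf skip every SCAN_* key.
public class SerializedScanExample {
  public static void configure(Configuration conf) throws IOException {
    Scan scan = new Scan();
    scan.setMaxVersions(3);
    scan.setCacheBlocks(false);
    conf.set(TableInputFormat.SCAN, TableMapReduceUtil.convertScanToString(scan));
  }
}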