HBASE-2374 TableInputFormat - Configurable parameter to add column families
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@929006 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 59b13bfb7a
commit ee3c65fb58
@@ -467,6 +467,8 @@ Release 0.21.0 - Unreleased
    HBASE-2389  HTable - delete / put unnecessary sync (Kay Kay via Stack)
    HBASE-2385  Debug Message "Received report from unknown server" should be
                INFO or WARN
+   HBASE-2374  TableInputFormat - Configurable parameter to add column families
+               (Kay Kay via Stack)
 
 NEW FEATURES
    HBASE-1961  HBase EC2 scripts
@@ -28,6 +28,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -40,8 +41,12 @@ implements Configurable {
 
   /** Job parameter that specifies the input table. */
   public static final String INPUT_TABLE = "hbase.mapreduce.inputtable";
-  /** Base-64 encoded scanner. All other SCAN_ confs are ignored if this is specified. */
+  /** Base-64 encoded scanner. All other SCAN_ confs are ignored if this is specified.
+   * See {@link TableMapReduceUtil#convertScanToString(Scan)} for more details.
+   */
   public static final String SCAN = "hbase.mapreduce.scan";
+  /** Column Family to Scan */
+  public static final String SCAN_COLUMN_FAMILY = "hbase.mapreduce.scan.column.family";
   /** Space delimited list of columns to scan. */
   public static final String SCAN_COLUMNS = "hbase.mapreduce.scan.columns";
   /** The timestamp used to filter columns with a specific timestamp. */
@@ -105,6 +110,10 @@ implements Configurable {
         scan.addColumns(conf.get(SCAN_COLUMNS));
       }
 
+      if (conf.get(SCAN_COLUMN_FAMILY) != null) {
+        scan.addFamily(Bytes.toBytes(conf.get(SCAN_COLUMN_FAMILY)));
+      }
+
       if (conf.get(SCAN_TIMESTAMP) != null) {
         scan.setTimeStamp(Long.parseLong(conf.get(SCAN_TIMESTAMP)));
       }
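For context, the snippet below is a minimal, hypothetical driver showing how a job could use the new hbase.mapreduce.scan.column.family parameter added by this patch. The class name, table name ("mytable"), and family name ("info") are illustrative assumptions, not part of this commit.

    // Hypothetical driver (not part of this commit) exercising the new
    // SCAN_COLUMN_FAMILY parameter; "mytable" and "info" are made-up names.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
    import org.apache.hadoop.mapreduce.Job;

    public class ScanOneFamilyDriver {
      public static void main(String[] args) throws Exception {
        Configuration conf = new HBaseConfiguration();          // picks up hbase-site.xml from the classpath
        conf.set(TableInputFormat.INPUT_TABLE, "mytable");      // table to read (hypothetical name)
        conf.set(TableInputFormat.SCAN_COLUMN_FAMILY, "info");  // scan an entire family via the new parameter

        Job job = new Job(conf, "scan-one-family");
        job.setInputFormatClass(TableInputFormat.class);
        // Mapper, reducer, and output format setup omitted for brevity.
        job.waitForCompletion(true);
      }
    }

Note that, as the SCAN javadoc above states, this and the other SCAN_ properties only take effect when no Base-64 encoded Scan is supplied through hbase.mapreduce.scan.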