diff --git a/CHANGES.txt b/CHANGES.txt
index fcdb03a6cc2..c0e30c5a801 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -438,6 +438,7 @@ Release 0.92.0 - Unreleased
    HBASE-4816  Regionserver wouldn't go down because split happened exactly at same
                time we issued bulk user region close call on our way out
    HBASE-4815  Disable online altering by default, create a config for it
+   HBASE-4623  Remove @deprecated Scan methods in 0.90 from TRUNK and 0.92
 
   TESTS
    HBASE-4450  test for number of blocks read: to serve as baseline for expected
diff --git a/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 9b4d31da940..0fa6b027dba 100644
--- a/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -609,94 +609,6 @@ public class Scan extends OperationWithAttributes implements Writable {
     writeAttributes(out);
   }
 
-  /**
-   * Parses a combined family and qualifier and adds either both or just the
-   * family in case there is not qualifier. This assumes the older colon
-   * divided notation, e.g. "data:contents" or "meta:".
-   *
-   * Note: It will through an error when the colon is missing.
-   *
-   * @param familyAndQualifier family and qualifier
-   * @return A reference to this instance.
-   * @throws IllegalArgumentException When the colon is missing.
-   * @deprecated use {@link #addColumn(byte[], byte[])} instead
-   */
-  public Scan addColumn(byte[] familyAndQualifier) {
-    byte [][] fq = KeyValue.parseColumn(familyAndQualifier);
-    if (fq.length > 1 && fq[1] != null && fq[1].length > 0) {
-      addColumn(fq[0], fq[1]);
-    } else {
-      addFamily(fq[0]);
-    }
-    return this;
-  }
-
-  /**
-   * Adds an array of columns specified using old format, family:qualifier.
-   *
-   * Overrides previous calls to addFamily for any families in the input.
-   *
-   * @param columns array of columns, formatted as family:qualifier
-   * @deprecated issue multiple {@link #addColumn(byte[], byte[])} instead
-   * @return this
-   */
-  public Scan addColumns(byte [][] columns) {
-    for (byte[] column : columns) {
-      addColumn(column);
-    }
-    return this;
-  }
-
-  /**
-   * Convenience method to help parse old style (or rather user entry on the
-   * command line) column definitions, e.g. "data:contents mime:". The columns
-   * must be space delimited and always have a colon (":") to denote family
-   * and qualifier.
-   *
-   * @param columns The columns to parse.
-   * @return A reference to this instance.
-   * @deprecated use {@link #addColumn(byte[], byte[])} instead
-   */
-  public Scan addColumns(String columns) {
-    String[] cols = columns.split(" ");
-    for (String col : cols) {
-      addColumn(Bytes.toBytes(col));
-    }
-    return this;
-  }
-
-  /**
-   * Helps to convert the binary column families and qualifiers to a text
-   * representation, e.g. "data:mimetype data:contents meta:". Binary values
-   * are properly encoded using {@link Bytes#toBytesBinary(String)}.
-   *
-   * @return The columns in an old style string format.
-   * @deprecated
-   */
-  public String getInputColumns() {
-    StringBuilder cols = new StringBuilder("");
-    for (Map.Entry<byte[], NavigableSet<byte[]>> e :
-      familyMap.entrySet()) {
-      byte[] fam = e.getKey();
-      if (cols.length() > 0) cols.append(" ");
-      NavigableSet<byte[]> quals = e.getValue();
-      // check if this family has qualifiers
-      if (quals != null && quals.size() > 0) {
-        StringBuilder cs = new StringBuilder("");
-        for (byte[] qual : quals) {
-          if (cs.length() > 0) cs.append(" ");
-          // encode values to make parsing easier later
-          cs.append(Bytes.toStringBinary(fam)).append(":").append(Bytes.toStringBinary(qual));
-        }
-        cols.append(cs);
-      } else {
-        // only add the family but with old style delimiter
-        cols.append(Bytes.toStringBinary(fam)).append(":");
-      }
-    }
-    return cols.toString();
-  }
-
   /**
    * Enable/disable "raw" mode for this scan.
* If "raw" is enabled the scan will return all diff --git a/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java b/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java index c875edb6950..42569fbdd25 100644 --- a/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java +++ b/src/main/java/org/apache/hadoop/hbase/mapred/TableRecordReaderImpl.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; +import org.apache.hadoop.hbase.mapreduce.TableInputFormat; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Writables; import org.apache.hadoop.util.StringUtils; @@ -59,7 +60,7 @@ public class TableRecordReaderImpl { if ((endRow != null) && (endRow.length > 0)) { if (trrRowFilter != null) { Scan scan = new Scan(firstRow, endRow); - scan.addColumns(trrInputColumns); + TableInputFormat.addColumns(scan, trrInputColumns); scan.setFilter(trrRowFilter); scan.setCacheBlocks(false); this.scanner = this.htable.getScanner(scan); @@ -68,7 +69,7 @@ public class TableRecordReaderImpl { Bytes.toStringBinary(firstRow) + ", endRow: " + Bytes.toStringBinary(endRow)); Scan scan = new Scan(firstRow, endRow); - scan.addColumns(trrInputColumns); + TableInputFormat.addColumns(scan, trrInputColumns); this.scanner = this.htable.getScanner(scan); } } else { @@ -76,8 +77,8 @@ public class TableRecordReaderImpl { Bytes.toStringBinary(firstRow) + ", no endRow"); Scan scan = new Scan(firstRow); - scan.addColumns(trrInputColumns); -// scan.setFilter(trrRowFilter); + TableInputFormat.addColumns(scan, trrInputColumns); + scan.setFilter(trrRowFilter); this.scanner = this.htable.getScanner(scan); } } diff --git a/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java index 5dfb13e3d07..ef19e6d8d38 100644 --- a/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java +++ b/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java @@ -25,6 +25,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; @@ -106,7 +107,7 @@ implements Configurable { scan = new Scan(); if (conf.get(SCAN_COLUMNS) != null) { - scan.addColumns(conf.get(SCAN_COLUMNS)); + addColumns(scan, conf.get(SCAN_COLUMNS)); } if (conf.get(SCAN_COLUMN_FAMILY) != null) { @@ -140,4 +141,55 @@ implements Configurable { setScan(scan); } + + /** + * Parses a combined family and qualifier and adds either both or just the + * family in case there is not qualifier. This assumes the older colon + * divided notation, e.g. "data:contents" or "meta:". + *

+ * Note: It will through an error when the colon is missing. + * + * @param familyAndQualifier family and qualifier + * @return A reference to this instance. + * @throws IllegalArgumentException When the colon is missing. + */ + private static void addColumn(Scan scan, byte[] familyAndQualifier) { + byte [][] fq = KeyValue.parseColumn(familyAndQualifier); + if (fq.length > 1 && fq[1] != null && fq[1].length > 0) { + scan.addColumn(fq[0], fq[1]); + } else { + scan.addFamily(fq[0]); + } + } + + /** + * Adds an array of columns specified using old format, family:qualifier. + *

+ * Overrides previous calls to addFamily for any families in the input. + * + * @param columns array of columns, formatted as

family:qualifier
+   */
+  public static void addColumns(Scan scan, byte [][] columns) {
+    for (byte[] column : columns) {
+      addColumn(scan, column);
+    }
+  }
+
+  /**
+   * Convenience method to help parse old style (or rather user entry on the
+   * command line) column definitions, e.g. "data:contents mime:". The columns
+   * must be space delimited and always have a colon (":") to denote family
+   * and qualifier.
+   *
+   * @param scan The Scan to update.
+   * @param columns The columns to parse.
+   */
+  private static void addColumns(Scan scan, String columns) {
+    String[] cols = columns.split(" ");
+    for (String col : cols) {
+      addColumn(scan, Bytes.toBytes(col));
+    }
+  }
+
 }
\ No newline at end of file
diff --git a/src/main/ruby/hbase/table.rb b/src/main/ruby/hbase/table.rb
index f9433d45483..3341aa46707 100644
--- a/src/main/ruby/hbase/table.rb
+++ b/src/main/ruby/hbase/table.rb
@@ -235,7 +235,14 @@ module Hbase
           org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes)
         end
 
-      columns.each { |c| scan.addColumns(c) }
+      columns.each do |c|
+        family, qualifier = parse_column_name(c.to_s)
+        if qualifier
+          scan.addColumn(family, qualifier)
+        else
+          scan.addFamily(family)
+        end
+      end
 
       unless filter.class == String
         scan.setFilter(filter)
diff --git a/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java b/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
index ee2b6567b72..5e9c299350c 100644
--- a/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
+++ b/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
@@ -195,7 +196,7 @@ public class TestTableMapReduce {
    */
   private void verifyAttempt(final HTable table) throws IOException, NullPointerException {
     Scan scan = new Scan();
-    scan.addColumns(columns);
+    TableInputFormat.addColumns(scan, columns);
     ResultScanner scanner = table.getScanner(scan);
     try {
       for (Result r : scanner) {
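
Example (not part of the patch): a minimal sketch of how client code migrates off the removed Scan.addColumns(String) / Scan.addColumn(byte[]) overloads; the "data", "contents" and "meta" names are illustrative only.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanMigrationSketch {
  public static Scan buildScan() {
    Scan scan = new Scan();
    // Before this patch (deprecated, now removed):
    //   scan.addColumns("data:contents meta:");
    // After: name the family and qualifier explicitly ...
    scan.addColumn(Bytes.toBytes("data"), Bytes.toBytes("contents"));
    // ... and a bare "meta:" entry (family only) becomes an addFamily call.
    scan.addFamily(Bytes.toBytes("meta"));
    return scan;
  }
}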
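
Example (not part of the patch): a minimal sketch of driving the old space-delimited column list through the TableInputFormat.SCAN_COLUMNS configuration key, which the patched setConf() now parses with the private addColumns(Scan, String) helper instead of Scan.addColumns(String); the table name, column list and job name are illustrative only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.mapreduce.Job;

public class ScanColumnsJobSketch {
  public static Job createJob() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    conf.set(TableInputFormat.INPUT_TABLE, "mytable");
    // Old "family:qualifier" entries, space separated; "meta:" selects a whole family.
    conf.set(TableInputFormat.SCAN_COLUMNS, "data:contents meta:");
    Job job = new Job(conf, "scan-columns-sketch");
    job.setInputFormatClass(TableInputFormat.class);
    // Mapper/reducer setup omitted; only the input-format wiring is shown here.
    return job;
  }
}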
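
Example (not part of the patch): a minimal sketch of the call path the old mapred classes now take, handing a byte[][] of "family:qualifier" columns to the new public TableInputFormat.addColumns(Scan, byte[][]) rather than the removed Scan.addColumns(byte[][]); the column names are illustrative only.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.util.Bytes;

public class MapredColumnsSketch {
  public static void main(String[] args) {
    byte[][] columns = { Bytes.toBytes("data:contents"), Bytes.toBytes("meta:") };
    Scan scan = new Scan();
    // "data:contents" adds family + qualifier; "meta:" (no qualifier) adds the whole family.
    TableInputFormat.addColumns(scan, columns);
    System.out.println("families selected: " + scan.getFamilyMap().size());
  }
}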