HBASE-4436 Remove @deprecated Scan methods in 0.90 from TRUNK and 0.92

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1203959 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2011-11-19 04:45:14 +00:00
parent 95d22b9dc5
commit eba8dab3ec
6 changed files with 69 additions and 95 deletions

View File: CHANGES.txt

@@ -438,6 +438,7 @@ Release 0.92.0 - Unreleased
    HBASE-4816 Regionserver wouldn't go down because split happened exactly at same
               time we issued bulk user region close call on our way out
    HBASE-4815 Disable online altering by default, create a config for it
+   HBASE-4623 Remove @deprecated Scan methods in 0.90 from TRUNK and 0.92

   TESTS
    HBASE-4450 test for number of blocks read: to serve as baseline for expected

View File: Scan.java

@@ -609,94 +609,6 @@ public class Scan extends OperationWithAttributes implements Writable {
     writeAttributes(out);
   }
-
-  /**
-   * Parses a combined family and qualifier and adds either both or just the
-   * family in case there is no qualifier. This assumes the older colon
-   * divided notation, e.g. "data:contents" or "meta:".
-   * <p>
-   * Note: It will throw an error when the colon is missing.
-   *
-   * @param familyAndQualifier family and qualifier
-   * @return A reference to this instance.
-   * @throws IllegalArgumentException When the colon is missing.
-   * @deprecated use {@link #addColumn(byte[], byte[])} instead
-   */
-  public Scan addColumn(byte[] familyAndQualifier) {
-    byte [][] fq = KeyValue.parseColumn(familyAndQualifier);
-    if (fq.length > 1 && fq[1] != null && fq[1].length > 0) {
-      addColumn(fq[0], fq[1]);
-    } else {
-      addFamily(fq[0]);
-    }
-    return this;
-  }
-
-  /**
-   * Adds an array of columns specified using old format, family:qualifier.
-   * <p>
-   * Overrides previous calls to addFamily for any families in the input.
-   *
-   * @param columns array of columns, formatted as <pre>family:qualifier</pre>
-   * @deprecated issue multiple {@link #addColumn(byte[], byte[])} instead
-   * @return this
-   */
-  public Scan addColumns(byte [][] columns) {
-    for (byte[] column : columns) {
-      addColumn(column);
-    }
-    return this;
-  }
-
-  /**
-   * Convenience method to help parse old style (or rather user entry on the
-   * command line) column definitions, e.g. "data:contents mime:". The columns
-   * must be space delimited and always have a colon (":") to denote family
-   * and qualifier.
-   *
-   * @param columns The columns to parse.
-   * @return A reference to this instance.
-   * @deprecated use {@link #addColumn(byte[], byte[])} instead
-   */
-  public Scan addColumns(String columns) {
-    String[] cols = columns.split(" ");
-    for (String col : cols) {
-      addColumn(Bytes.toBytes(col));
-    }
-    return this;
-  }
-
-  /**
-   * Helps to convert the binary column families and qualifiers to a text
-   * representation, e.g. "data:mimetype data:contents meta:". Binary values
-   * are properly encoded using {@link Bytes#toBytesBinary(String)}.
-   *
-   * @return The columns in an old style string format.
-   * @deprecated
-   */
-  public String getInputColumns() {
-    StringBuilder cols = new StringBuilder("");
-    for (Map.Entry<byte[], NavigableSet<byte[]>> e :
-        familyMap.entrySet()) {
-      byte[] fam = e.getKey();
-      if (cols.length() > 0) cols.append(" ");
-      NavigableSet<byte[]> quals = e.getValue();
-      // check if this family has qualifiers
-      if (quals != null && quals.size() > 0) {
-        StringBuilder cs = new StringBuilder("");
-        for (byte[] qual : quals) {
-          if (cs.length() > 0) cs.append(" ");
-          // encode values to make parsing easier later
-          cs.append(Bytes.toStringBinary(fam)).append(":").append(Bytes.toStringBinary(qual));
-        }
-        cols.append(cs);
-      } else {
-        // only add the family but with old style delimiter
-        cols.append(Bytes.toStringBinary(fam)).append(":");
-      }
-    }
-    return cols.toString();
-  }
-
   /**
    * Enable/disable "raw" mode for this scan.
    * If "raw" is enabled the scan will return all

View File: TableRecordReaderImpl.java (mapred)

@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.util.StringUtils;
@@ -59,7 +60,7 @@ public class TableRecordReaderImpl {
     if ((endRow != null) && (endRow.length > 0)) {
       if (trrRowFilter != null) {
         Scan scan = new Scan(firstRow, endRow);
-        scan.addColumns(trrInputColumns);
+        TableInputFormat.addColumns(scan, trrInputColumns);
         scan.setFilter(trrRowFilter);
         scan.setCacheBlocks(false);
         this.scanner = this.htable.getScanner(scan);
@@ -68,7 +69,7 @@ public class TableRecordReaderImpl {
           Bytes.toStringBinary(firstRow) + ", endRow: " +
           Bytes.toStringBinary(endRow));
         Scan scan = new Scan(firstRow, endRow);
-        scan.addColumns(trrInputColumns);
+        TableInputFormat.addColumns(scan, trrInputColumns);
         this.scanner = this.htable.getScanner(scan);
       }
     } else {
@@ -76,8 +77,8 @@ public class TableRecordReaderImpl {
           Bytes.toStringBinary(firstRow) + ", no endRow");
         Scan scan = new Scan(firstRow);
-        scan.addColumns(trrInputColumns);
-        // scan.setFilter(trrRowFilter);
+        TableInputFormat.addColumns(scan, trrInputColumns);
+        scan.setFilter(trrRowFilter);
         this.scanner = this.htable.getScanner(scan);
       }
     }
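The call sites above swap the deprecated instance method for the static helper this commit adds to TableInputFormat; note that in the no-endRow branch the previously commented-out setFilter call is now active, so trrRowFilter is applied there as well. A sketch of the new call-site shape (the start row and column value are hypothetical):

  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
  import org.apache.hadoop.hbase.util.Bytes;

  public class RecordReaderCallSite {
    public static void main(String[] args) {
      Scan scan = new Scan(Bytes.toBytes("startrow"));        // hypothetical start row
      byte[][] inputColumns = { Bytes.toBytes("contents:") };  // old family:qualifier format
      // Was: scan.addColumns(inputColumns);
      TableInputFormat.addColumns(scan, inputColumns);
    }
  }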

View File: TableInputFormat.java (mapreduce)

@@ -25,6 +25,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -106,7 +107,7 @@ implements Configurable {
       scan = new Scan();

       if (conf.get(SCAN_COLUMNS) != null) {
-        scan.addColumns(conf.get(SCAN_COLUMNS));
+        addColumns(scan, conf.get(SCAN_COLUMNS));
       }

       if (conf.get(SCAN_COLUMN_FAMILY) != null) {
@@ -140,4 +141,55 @@ implements Configurable {
     setScan(scan);
   }
 
+  /**
+   * Parses a combined family and qualifier and adds either both or just the
+   * family in case there is no qualifier. This assumes the older colon
+   * divided notation, e.g. "data:contents" or "meta:".
+   * <p>
+   * Note: It will throw an error when the colon is missing.
+   *
+   * @param scan The Scan to update.
+   * @param familyAndQualifier family and qualifier
+   * @throws IllegalArgumentException When the colon is missing.
+   */
+  private static void addColumn(Scan scan, byte[] familyAndQualifier) {
+    byte [][] fq = KeyValue.parseColumn(familyAndQualifier);
+    if (fq.length > 1 && fq[1] != null && fq[1].length > 0) {
+      scan.addColumn(fq[0], fq[1]);
+    } else {
+      scan.addFamily(fq[0]);
+    }
+  }
+
+  /**
+   * Adds an array of columns specified using old format, family:qualifier.
+   * <p>
+   * Overrides previous calls to addFamily for any families in the input.
+   *
+   * @param scan The Scan to update.
+   * @param columns array of columns, formatted as <pre>family:qualifier</pre>
+   */
+  public static void addColumns(Scan scan, byte [][] columns) {
+    for (byte[] column : columns) {
+      addColumn(scan, column);
+    }
+  }
+
+  /**
+   * Convenience method to help parse old style (or rather user entry on the
+   * command line) column definitions, e.g. "data:contents mime:". The columns
+   * must be space delimited and always have a colon (":") to denote family
+   * and qualifier.
+   *
+   * @param scan The Scan to update.
+   * @param columns The columns to parse.
+   */
+  private static void addColumns(Scan scan, String columns) {
+    String[] cols = columns.split(" ");
+    for (String col : cols) {
+      addColumn(scan, Bytes.toBytes(col));
+    }
+  }
 }
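Of the three helpers, only addColumns(Scan, byte[][]) is public; the single-column and String variants are private and are reached through the SCAN_COLUMNS configuration path in setConf above. A usage sketch under those assumptions (column names hypothetical):

  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
  import org.apache.hadoop.hbase.util.Bytes;

  public class AddColumnsUsage {
    public static void main(String[] args) {
      Scan scan = new Scan();
      byte[][] columns = {
        Bytes.toBytes("data:contents"),  // family and qualifier
        Bytes.toBytes("meta:")           // trailing colon: whole family
      };
      TableInputFormat.addColumns(scan, columns);
    }
  }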

View File: table.rb (shell)

@@ -235,7 +235,14 @@ module Hbase
           org.apache.hadoop.hbase.client.Scan.new(startrow.to_java_bytes)
         end

-      columns.each { |c| scan.addColumns(c) }
+      columns.each do |c|
+        family, qualifier = parse_column_name(c.to_s)
+        if qualifier
+          scan.addColumn(family, qualifier)
+        else
+          scan.addFamily(family)
+        end
+      end

       unless filter.class == String
         scan.setFilter(filter)
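The shell now performs in Ruby the same family/qualifier split that the removed Scan.addColumn(byte[]) did in Java. For comparison, the equivalent Java branch using KeyValue.parseColumn, as in the helper added above (the column name is hypothetical):

  import org.apache.hadoop.hbase.KeyValue;
  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.util.Bytes;

  public class ColumnSplit {
    public static void main(String[] args) {
      Scan scan = new Scan();
      byte[][] fq = KeyValue.parseColumn(Bytes.toBytes("info:name"));
      if (fq.length > 1 && fq[1] != null && fq[1].length > 0) {
        scan.addColumn(fq[0], fq[1]);  // qualifier present
      } else {
        scan.addFamily(fq[0]);         // family only, e.g. "info:"
      }
    }
  }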

View File: TestTableMapReduce.java (mapred)

@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
@@ -195,7 +196,7 @@ public class TestTableMapReduce {
    */
   private void verifyAttempt(final HTable table) throws IOException, NullPointerException {
     Scan scan = new Scan();
-    scan.addColumns(columns);
+    TableInputFormat.addColumns(scan, columns);
     ResultScanner scanner = table.getScanner(scan);
     try {
       for (Result r : scanner) {