HBASE-1906 FilterList of prefix and columnvalue not working properly with deletes and multiple values

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@825237 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2009-10-14 19:37:44 +00:00
parent 2929b1bd38
commit cb4c765635
9 changed files with 358 additions and 84 deletions

CHANGES.txt

@@ -65,6 +65,8 @@ Release 0.21.0 - Unreleased
               work
    HBASE-1889 ClassNotFoundException on trunk for REST
    HBASE-1905 Remove unused config. hbase.hstore.blockCache.blockSize
+   HBASE-1906 FilterList of prefix and columnvalue not working properly with
+              deletes and multiple values
 
  IMPROVEMENTS
    HBASE-1760 Cleanup TODOs in HTable

CompareFilter.java

@@ -25,10 +25,8 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Arrays;
 
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
-import org.apache.hadoop.io.ObjectWritable;
 
 /**
  * This is a generic filter to be used to filter by comparison. It takes an

FilterList.java

@@ -33,13 +33,14 @@ import org.apache.hadoop.io.Writable;
 /**
  * Implementation of {@link Filter} that represents an ordered List of Filters
- * which will be evaluated with a specified boolean operator MUST_PASS_ALL
- * (!AND) or MUST_PASS_ONE (!OR). Since you can use Filter Lists as children
- * of Filter Lists, you can create a hierarchy of filters to be evaluated.
+ * which will be evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL}
+ * (<code>!AND</code>) or {@link Operator#MUST_PASS_ONE} (<code>!OR</code>).
+ * Since you can use Filter Lists as children of Filter Lists, you can create a
+ * hierarchy of filters to be evaluated.
+ * Defaults to {@link Operator#MUST_PASS_ALL}.
  * <p>TODO: Fix creation of Configuration on serialization and deserialization.
  */
 public class FilterList implements Filter {
   /** set operator */
   public static enum Operator {
     /** !AND */
@@ -69,6 +70,15 @@ public class FilterList implements Filter {
     this.filters = rowFilters;
   }
 
+  /**
+   * Constructor that takes an operator.
+   *
+   * @param operator Operator to process filter set with.
+   */
+  public FilterList(final Operator operator) {
+    this.operator = operator;
+  }
+
   /**
    * Constructor that takes a set of {@link Filter}s and an operator.
    *
@@ -115,19 +125,19 @@ public class FilterList implements Filter {
   public boolean filterRowKey(byte[] rowKey, int offset, int length) {
     for (Filter filter : filters) {
-      if (operator == Operator.MUST_PASS_ALL) {
-        if (filter.filterAllRemaining()
-            || filter.filterRowKey(rowKey, offset, length)) {
+      if (this.operator == Operator.MUST_PASS_ALL) {
+        if (filter.filterAllRemaining() ||
+            filter.filterRowKey(rowKey, offset, length)) {
           return true;
         }
-      } else if (operator == Operator.MUST_PASS_ONE) {
-        if (!filter.filterAllRemaining()
-            && !filter.filterRowKey(rowKey, offset, length)) {
+      } else if (this.operator == Operator.MUST_PASS_ONE) {
+        if (!filter.filterAllRemaining() &&
+            !filter.filterRowKey(rowKey, offset, length)) {
           return false;
         }
       }
     }
-    return operator == Operator.MUST_PASS_ONE;
+    return this.operator == Operator.MUST_PASS_ONE;
   }
 
   public boolean filterAllRemaining() {
@@ -179,8 +189,7 @@ public class FilterList implements Filter {
   public boolean filterRow() {
     for (Filter filter : filters) {
       if (operator == Operator.MUST_PASS_ALL) {
-        if (filter.filterAllRemaining()
-            || filter.filterRow()) {
+        if (filter.filterAllRemaining() || filter.filterRow()) {
           return true;
         }
       } else if (operator == Operator.MUST_PASS_ONE) {
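For orientation: the new operator-only constructor and the MUST_PASS_ALL semantics documented above are exactly what this fix exercises, a prefix filter ANDed with a column-value filter over rows containing deletes. A minimal sketch of that stack (family, qualifier, and value strings borrowed from the tests later in this commit):

  FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
  filters.addFilter(new PrefixFilter(Bytes.toBytes("prefix1")));
  SingleColumnValueFilter valueFilter = new SingleColumnValueFilter(
      Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"),
      CompareOp.EQUAL, Bytes.toBytes("this is the value"));
  // Without this, rows lacking the column pass the value filter entirely.
  valueFilter.setFilterIfMissing(true);
  filters.addFilter(valueFilter);
  Scan scan = new Scan();
  scan.setFilter(filters);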

SingleColumnValueFilter.java

@@ -110,14 +110,17 @@ public class SingleColumnValueFilter implements Filter {
   }
 
   public boolean filterRowKey(byte[] rowKey, int offset, int length) {
+    // We don't filter on the row key... we filter later on column value so
+    // always return false.
     return false;
   }
 
   public ReturnCode filterKeyValue(KeyValue keyValue) {
-    if(matchedColumn) {
+    // System.out.println("REMOVE KEY=" + keyValue.toString() + ", value=" + Bytes.toString(keyValue.getValue()));
+    if (this.matchedColumn) {
       // We already found and matched the single column, all keys now pass
       return ReturnCode.INCLUDE;
-    } else if(foundColumn) {
+    } else if (this.foundColumn) {
       // We found but did not match the single column, skip to next row
       return ReturnCode.NEXT_ROW;
     }
@@ -129,16 +132,18 @@ public class SingleColumnValueFilter implements Filter {
       keyValue.getValueOffset(), keyValue.getValueLength())) {
       return ReturnCode.NEXT_ROW;
     }
-    matchedColumn = true;
+    this.matchedColumn = true;
     return ReturnCode.INCLUDE;
   }
 
-  private boolean filterColumnValue(final byte[] data, final int offset,
+  private boolean filterColumnValue(final byte [] data, final int offset,
       final int length) {
-    int compareResult = comparator.compareTo(Arrays.copyOfRange(data, offset,
-      offset + length));
-
-    switch (compareOp) {
+    // TODO: Can this filter take a rawcomparator so don't have to make this
+    // byte array copy?
+    int compareResult =
+      this.comparator.compareTo(Arrays.copyOfRange(data, offset, offset + length));
+    LOG.debug("compareResult=" + compareResult + " " + Bytes.toString(data, offset, length));
+    switch (this.compareOp) {
     case LESS:
       return compareResult <= 0;
     case LESS_OR_EQUAL:
@@ -163,7 +168,7 @@ public class SingleColumnValueFilter implements Filter {
   public boolean filterRow() {
     // If column was found, return false if it was matched, true if it was not
     // If column not found, return true if we filter if missing, false if not
-    return foundColumn ? !matchedColumn : filterIfMissing;
+    return this.foundColumn? !this.matchedColumn: this.filterIfMissing;
   }
 
   public void reset() {
@@ -173,8 +178,8 @@ public class SingleColumnValueFilter implements Filter {
   /**
    * Get whether entire row should be filtered if column is not found.
-   * @return filterIfMissing true if row should be skipped if column not found,
-   * false if row should be let through anyways
+   * @return true if row should be skipped if column not found, false if row
+   * should be let through anyways
    */
   public boolean getFilterIfMissing() {
     return filterIfMissing;
@@ -200,12 +205,12 @@ public class SingleColumnValueFilter implements Filter {
     if(this.columnQualifier.length == 0) {
       this.columnQualifier = null;
     }
-    compareOp = CompareOp.valueOf(in.readUTF());
-    comparator = (WritableByteArrayComparable) HbaseObjectWritable.readObject(in,
-      null);
-    foundColumn = in.readBoolean();
-    matchedColumn = in.readBoolean();
-    filterIfMissing = in.readBoolean();
+    this.compareOp = CompareOp.valueOf(in.readUTF());
+    this.comparator =
+      (WritableByteArrayComparable)HbaseObjectWritable.readObject(in, null);
+    this.foundColumn = in.readBoolean();
+    this.matchedColumn = in.readBoolean();
+    this.filterIfMissing = in.readBoolean();
   }
 
   public void write(final DataOutput out) throws IOException {
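The filterRow() ternary above packs this filter's whole row-level decision into one expression. Restated as a standalone helper for reading (hypothetical name, same logic; true means the row is excluded):

  static boolean excludeRow(boolean foundColumn, boolean matchedColumn,
      boolean filterIfMissing) {
    // Column was seen in the row: exclude only if its value never matched.
    // Column was never seen: exclude only when filterIfMissing is set.
    return foundColumn ? !matchedColumn : filterIfMissing;
  }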

HRegion.java

@@ -1179,8 +1179,6 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
           regionInfo.getTableDesc().getName(), kvs,
           (regionInfo.isMetaRegion() || regionInfo.isRootRegion()), now);
       }
-
-
       flush = isFlushSize(size);
     } finally {
       this.updatesLock.readLock().unlock();
@@ -1744,57 +1742,58 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
    * @throws IOException
    */
   private boolean nextInternal() throws IOException {
-    // This method should probably be reorganized a bit... has gotten messy
-    KeyValue kv;
-    byte[] currentRow = null;
+    byte [] currentRow = null;
     boolean filterCurrentRow = false;
     while (true) {
-      kv = this.storeHeap.peek();
-      if (kv == null) {
-        return false;
-      }
+      KeyValue kv = this.storeHeap.peek();
+      if (kv == null) return false;
       byte [] row = kv.getRow();
-      if (filterCurrentRow && Bytes.equals(currentRow, row)) {
-        // filter all columns until row changes
-        this.storeHeap.next(results);
-        results.clear();
+      boolean samerow = Bytes.equals(currentRow, row);
+      if (samerow && filterCurrentRow) {
+        // Filter all columns until row changes
+        this.storeHeap.next(this.results);
+        this.results.clear();
         continue;
       }
-      // see if current row should be filtered based on row key
-      if (filter != null && filter.filterRowKey(row, 0, row.length)) {
-        if(!results.isEmpty() && !Bytes.equals(currentRow, row)) {
-          return true;
-        }
-        this.storeHeap.next(results);
-        results.clear();
-        resetFilters();
-        filterCurrentRow = true;
-        currentRow = row;
-        continue;
-      }
-      if(!Bytes.equals(currentRow, row)) {
+      if (!samerow) {
         // Continue on the next row:
         currentRow = row;
         filterCurrentRow = false;
         // See if we passed stopRow
-        if(stopRow != null &&
-          comparator.compareRows(stopRow, 0, stopRow.length,
-            currentRow, 0, currentRow.length) <= 0) {
+        if (this.stopRow != null &&
+            comparator.compareRows(this.stopRow, 0, this.stopRow.length,
+              currentRow, 0, currentRow.length) <= 0) {
           return false;
         }
-        // if there are _no_ results or current row should be filtered
-        if (results.isEmpty() || filter != null && filter.filterRow()) {
-          // make sure results is empty
-          results.clear();
-          resetFilters();
-          continue;
-        }
-        return true;
+        if (hasResults()) return true;
+      }
+      // See if current row should be filtered based on row key
+      if (this.filter != null && this.filter.filterRowKey(row, 0, row.length)) {
+        resetFilters();
+        filterCurrentRow = true;
+        currentRow = row;
       }
       this.storeHeap.next(results);
     }
   }
 
+  /*
+   * Do we have results to return or should we continue. Call when we get to
+   * the end of a row. Does house cleaning -- clearing results and resetting
+   * filters -- if we are to continue.
+   * @return True if we should return else false if need to keep going.
+   */
+  private boolean hasResults() {
+    if (this.results.isEmpty() ||
+        this.filter != null && this.filter.filterRow()) {
+      // Make sure results is empty, reset filters
+      results.clear();
+      resetFilters();
+      return false;
+    }
+    return true;
+  }
+
   public void close() {
     storeHeap.close();
   }
@@ -2326,7 +2325,6 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
           store.get(get, qualifiers, results);
 
           if (!results.isEmpty()) {
-            byte [] oldValue = results.get(0).getValue();
             KeyValue kv = results.get(0);
             byte [] buffer = kv.getBuffer();
             int valueOffset = kv.getValueOffset();
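The extracted hasResults() is the crux of the fix: the emit-or-discard decision now happens once, at the row boundary, after the row filter has seen every KeyValue in the row (including rows emptied by deletes). Its contract, restated as a pure function (hypothetical name, same logic; true means emit the accumulated row):

  static boolean shouldEmitRow(List<KeyValue> results, Filter filter) {
    // Nothing accumulated, or the row filter vetoes the completed row:
    // discard it and keep scanning.
    return !(results.isEmpty() || (filter != null && filter.filterRow()));
  }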

HBaseTestCase.java

@@ -219,8 +219,7 @@ public abstract class HBaseTestCase extends TestCase {
     if (startKeyBytes == null || startKeyBytes.length == 0) {
       startKeyBytes = START_KEY_BYTES;
     }
-    return addContent(new HRegionIncommon(r), Bytes.toString(columnFamily),
-      null,
+    return addContent(new HRegionIncommon(r), Bytes.toString(columnFamily), null,
       startKeyBytes, endKey, -1);
   }

TestClient.java

@@ -20,10 +20,6 @@
 
 package org.apache.hadoop.hbase.client;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Map;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseClusterTestCase;
@@ -35,14 +31,23 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
 import org.apache.hadoop.hbase.filter.RegexStringComparator;
 import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.UUID;
+
 /**
  * Tests from client-side of a cluster.
  */
@@ -62,6 +67,135 @@ public class TestClient extends HBaseClusterTestCase {
     super();
   }
 
+  /**
+   * Test from client side of an involved filter against a multi family that
+   * involves deletes.
+   *
+   * @throws Exception
+   */
+  public void testWeirdCacheBehaviour() throws Exception {
+    byte[] TABLE = Bytes.toBytes("testWeirdCacheBehaviour");
+    byte[][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"),
+        Bytes.toBytes("trans-type"), Bytes.toBytes("trans-date"),
+        Bytes.toBytes("trans-tags"), Bytes.toBytes("trans-group") };
+    HTable ht = createTable(TABLE, FAMILIES);
+    String value = "this is the value";
+    String value2 = "this is some other value";
+    String keyPrefix1 = UUID.randomUUID().toString();
+    String keyPrefix2 = UUID.randomUUID().toString();
+    String keyPrefix3 = UUID.randomUUID().toString();
+    putRows(ht, 3, value, keyPrefix1);
+    putRows(ht, 3, value, keyPrefix2);
+    putRows(ht, 3, value, keyPrefix3);
+    ht.flushCommits();
+    putRows(ht, 3, value2, keyPrefix1);
+    putRows(ht, 3, value2, keyPrefix2);
+    putRows(ht, 3, value2, keyPrefix3);
+    HTable table = new HTable(conf, Bytes.toBytes("testWeirdCacheBehaviour"));
+    System.out.println("Checking values for key: " + keyPrefix1);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix1, value2, table));
+    System.out.println("Checking values for key: " + keyPrefix2);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix2, value2, table));
+    System.out.println("Checking values for key: " + keyPrefix3);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix3, value2, table));
+    deleteColumns(ht, value2, keyPrefix1);
+    deleteColumns(ht, value2, keyPrefix2);
+    deleteColumns(ht, value2, keyPrefix3);
+    System.out.println("Starting important checks.....");
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix1,
+        0, getNumberOfRows(keyPrefix1, value2, table));
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix2,
+        0, getNumberOfRows(keyPrefix2, value2, table));
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix3,
+        0, getNumberOfRows(keyPrefix3, value2, table));
+    ht.setScannerCaching(0);
+    assertEquals("Got back incorrect number of rows from scan", 0,
+        getNumberOfRows(keyPrefix1, value2, table));
+    ht.setScannerCaching(100);
+    assertEquals("Got back incorrect number of rows from scan", 0,
+        getNumberOfRows(keyPrefix2, value2, table));
+  }
+
+  private void deleteColumns(HTable ht, String value, String keyPrefix)
+      throws IOException {
+    ResultScanner scanner = buildScanner(keyPrefix, value, ht);
+    Iterator<Result> it = scanner.iterator();
+    int count = 0;
+    while (it.hasNext()) {
+      Result result = it.next();
+      Delete delete = new Delete(result.getRow());
+      delete.deleteColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
+      ht.delete(delete);
+      count++;
+    }
+    assertEquals("Did not perform correct number of deletes", 3, count);
+  }
+
+  private int getNumberOfRows(String keyPrefix, String value, HTable ht)
+      throws Exception {
+    ResultScanner resultScanner = buildScanner(keyPrefix, value, ht);
+    Iterator<Result> scanner = resultScanner.iterator();
+    int numberOfResults = 0;
+    while (scanner.hasNext()) {
+      Result result = scanner.next();
+      System.out.println("Got back key: " + Bytes.toString(result.getRow()));
+      for (KeyValue kv : result.raw()) {
+        System.out.println("kv=" + kv.toString() + ", "
+            + Bytes.toString(kv.getValue()));
+      }
+      numberOfResults++;
+    }
+    return numberOfResults;
+  }
+
+  private ResultScanner buildScanner(String keyPrefix, String value, HTable ht)
+      throws IOException {
+    // OurFilterList allFilters = new OurFilterList();
+    FilterList allFilters = new FilterList(/* FilterList.Operator.MUST_PASS_ALL */);
+    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
+    SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes
+        .toBytes("trans-tags"), Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes
+        .toBytes(value));
+    filter.setFilterIfMissing(true);
+    allFilters.addFilter(filter);
+    // allFilters.addFilter(new
+    // RowExcludingSingleColumnValueFilter(Bytes.toBytes("trans-tags"),
+    // Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value)));
+    Scan scan = new Scan();
+    scan.addFamily(Bytes.toBytes("trans-blob"));
+    scan.addFamily(Bytes.toBytes("trans-type"));
+    scan.addFamily(Bytes.toBytes("trans-date"));
+    scan.addFamily(Bytes.toBytes("trans-tags"));
+    scan.addFamily(Bytes.toBytes("trans-group"));
+    scan.setFilter(allFilters);
+    return ht.getScanner(scan);
+  }
+
+  private void putRows(HTable ht, int numRows, String value, String key)
+      throws IOException {
+    for (int i = 0; i < numRows; i++) {
+      String row = key + "_" + UUID.randomUUID().toString();
+      System.out.println(String.format("Saving row: %s, with value %s", row,
+          value));
+      Put put = new Put(Bytes.toBytes(row));
+      put.add(Bytes.toBytes("trans-blob"), null, Bytes
+          .toBytes("value for blob"));
+      put.add(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
+      put.add(Bytes.toBytes("trans-date"), null, Bytes
+          .toBytes("20090921010101999"));
+      put.add(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), Bytes
+          .toBytes(value));
+      put.add(Bytes.toBytes("trans-group"), null, Bytes
+          .toBytes("adhocTransactionGroupId"));
+      ht.put(put);
+    }
+  }
   /**
    * Test filters when multiple regions. It does counts. Needs eye-balling of
    * logs to ensure that we're not scanning more regions that we're supposed to.
@@ -249,6 +383,7 @@ public class TestClient extends HBaseClusterTestCase {
     scanner.close();
     System.out.println("Done.");
   }
+
   public void testFilters() throws Exception {
     byte [] TABLE = Bytes.toBytes("testFilters");
     HTable ht = createTable(TABLE, FAMILY);
@@ -2671,7 +2806,7 @@ public class TestClient extends HBaseClusterTestCase {
   }
 
   private byte [][] makeN(byte [] base, int n) {
-    if(n > 256) {
+    if (n > 256) {
       return makeNBig(base, n);
     }
     byte [][] ret = new byte[n][];

TestHRegion.java

@@ -40,6 +40,10 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScanner;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -80,6 +84,127 @@ public class TestHRegion extends HBaseTestCase {
   // /tmp/testtable
   //////////////////////////////////////////////////////////////////////////////
 
+  /**
+   * An involved filter test. Has multiple column families and deletes in mix.
+   */
+  public void testWeirdCacheBehaviour() throws Exception {
+    byte[] TABLE = Bytes.toBytes("testWeirdCacheBehaviour");
+    byte[][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"),
+        Bytes.toBytes("trans-type"), Bytes.toBytes("trans-date"),
+        Bytes.toBytes("trans-tags"), Bytes.toBytes("trans-group") };
+    initHRegion(TABLE, getName(), FAMILIES);
+    String value = "this is the value";
+    String value2 = "this is some other value";
+    String keyPrefix1 = "prefix1"; // UUID.randomUUID().toString();
+    String keyPrefix2 = "prefix2"; // UUID.randomUUID().toString();
+    String keyPrefix3 = "prefix3"; // UUID.randomUUID().toString();
+    putRows(this.region, 3, value, keyPrefix1);
+    putRows(this.region, 3, value, keyPrefix2);
+    putRows(this.region, 3, value, keyPrefix3);
+    // this.region.flushCommits();
+    putRows(this.region, 3, value2, keyPrefix1);
+    putRows(this.region, 3, value2, keyPrefix2);
+    putRows(this.region, 3, value2, keyPrefix3);
+    System.out.println("Checking values for key: " + keyPrefix1);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix1, value2, this.region));
+    System.out.println("Checking values for key: " + keyPrefix2);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix2, value2, this.region));
+    System.out.println("Checking values for key: " + keyPrefix3);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix3, value2, this.region));
+    deleteColumns(this.region, value2, keyPrefix1);
+    deleteColumns(this.region, value2, keyPrefix2);
+    deleteColumns(this.region, value2, keyPrefix3);
+    System.out.println("Starting important checks.....");
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix1,
+        0, getNumberOfRows(keyPrefix1, value2, this.region));
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix2,
+        0, getNumberOfRows(keyPrefix2, value2, this.region));
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix3,
+        0, getNumberOfRows(keyPrefix3, value2, this.region));
+  }
+
+  private void deleteColumns(HRegion r, String value, String keyPrefix)
+      throws IOException {
+    InternalScanner scanner = buildScanner(keyPrefix, value, r);
+    int count = 0;
+    boolean more = false;
+    List<KeyValue> results = new ArrayList<KeyValue>();
+    do {
+      more = scanner.next(results);
+      if (results != null && !results.isEmpty())
+        count++;
+      else
+        break;
+      Delete delete = new Delete(results.get(0).getRow());
+      delete.deleteColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
+      r.delete(delete, null, false);
+      results.clear();
+    } while (more);
+    assertEquals("Did not perform correct number of deletes", 3, count);
+  }
+
+  private int getNumberOfRows(String keyPrefix, String value, HRegion r)
+      throws Exception {
+    InternalScanner resultScanner = buildScanner(keyPrefix, value, r);
+    int numberOfResults = 0;
+    List<KeyValue> results = new ArrayList<KeyValue>();
+    boolean more = false;
+    do {
+      more = resultScanner.next(results);
+      if (results != null && !results.isEmpty()) numberOfResults++;
+      else break;
+      for (KeyValue kv: results) {
+        System.out.println("kv=" + kv.toString() + ", " + Bytes.toString(kv.getValue()));
+      }
+      results.clear();
+    } while(more);
+    return numberOfResults;
+  }
+
+  private InternalScanner buildScanner(String keyPrefix, String value, HRegion r)
+      throws IOException {
+    // Defaults FilterList.Operator.MUST_PASS_ALL.
+    FilterList allFilters = new FilterList();
+    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
+    // Only return rows where this column value exists in the row.
+    SingleColumnValueFilter filter =
+      new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
+        Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
+    filter.setFilterIfMissing(true);
+    allFilters.addFilter(filter);
+    Scan scan = new Scan();
+    scan.addFamily(Bytes.toBytes("trans-blob"));
+    scan.addFamily(Bytes.toBytes("trans-type"));
+    scan.addFamily(Bytes.toBytes("trans-date"));
+    scan.addFamily(Bytes.toBytes("trans-tags"));
+    scan.addFamily(Bytes.toBytes("trans-group"));
+    scan.setFilter(allFilters);
+    return r.getScanner(scan);
+  }
+
+  private void putRows(HRegion r, int numRows, String value, String key)
+      throws IOException {
+    for (int i = 0; i < numRows; i++) {
+      String row = key + "_" + i /* UUID.randomUUID().toString() */;
+      System.out.println(String.format("Saving row: %s, with value %s", row,
+          value));
+      Put put = new Put(Bytes.toBytes(row));
+      put.add(Bytes.toBytes("trans-blob"), null,
+          Bytes.toBytes("value for blob"));
+      put.add(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
+      put.add(Bytes.toBytes("trans-date"), null,
+          Bytes.toBytes("20090921010101999"));
+      put.add(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"),
+          Bytes.toBytes(value));
+      put.add(Bytes.toBytes("trans-group"), null,
+          Bytes.toBytes("adhocTransactionGroupId"));
+      r.put(put);
+    }
+  }
+
   public void testFamilyWithAndWithoutColon() throws Exception {
     byte [] b = Bytes.toBytes(getName());
     byte [] cf = Bytes.toBytes("cf");
@@ -1781,7 +1906,8 @@ public class TestHRegion extends HBaseTestCase {
   }
 
   private void initHRegion (byte [] tableName, String callingMethod,
-    HBaseConfiguration conf, byte [] ... families) throws IOException{
+    HBaseConfiguration conf, byte [] ... families)
+  throws IOException{
     HTableDescriptor htd = new HTableDescriptor(tableName);
     for(byte [] family : families) {
       htd.addFamily(new HColumnDescriptor(family));

TestScanner.java

@@ -176,9 +176,11 @@ public class TestScanner extends HBaseTestCase {
     try {
       this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
       addContent(this.r, HConstants.CATALOG_FAMILY);
-      Filter newFilter = new PrefixFilter(Bytes.toBytes("ab"));
+      byte [] prefix = Bytes.toBytes("ab");
+      Filter newFilter = new PrefixFilter(prefix);
       Scan scan = new Scan();
       scan.setFilter(newFilter);
+      scan.setStartRow(prefix);
       rowPrefixFilter(scan);
 
       byte[] stopRow = Bytes.toBytes("bbc");
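The TestScanner change pairs the PrefixFilter with a matching start row, so the scanner opens at the first row that could possibly match instead of at the beginning of the table. A minimal sketch of the pattern:

  byte [] prefix = Bytes.toBytes("ab");
  Scan scan = new Scan();
  scan.setStartRow(prefix); // seek directly to the first candidate row
  scan.setFilter(new PrefixFilter(prefix));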