diff --git a/CHANGES.txt b/CHANGES.txt
index 5649395bc74..940a2602384 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -65,6 +65,8 @@ Release 0.21.0 - Unreleased
work
HBASE-1889 ClassNotFoundException on trunk for REST
HBASE-1905 Remove unused config. hbase.hstore.blockCache.blockSize
+ HBASE-1906 FilterList of prefix and columnvalue not working properly with
+ deletes and multiple values
IMPROVEMENTS
HBASE-1760 Cleanup TODOs in HTable
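
For context: HBASE-1906 concerns a Scan whose filter is a FilterList that ANDs a
PrefixFilter with a SingleColumnValueFilter; with deletes and multiple cell
versions in play, such scans returned rows they should have filtered. A minimal
sketch of the combination, assuming the 0.20-era client API (the family,
qualifier, and value names below are illustrative, not taken from this patch):

    // Sketch only: builds the PrefixFilter + SingleColumnValueFilter
    // combination HBASE-1906 describes; family/qualifier names are assumed.
    Scan buildPrefixValueScan(final String keyPrefix, final String value) {
      Scan scan = new Scan();
      FilterList allFilters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
      allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
      allFilters.addFilter(new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
          Bytes.toBytes("qual"), CompareFilter.CompareOp.EQUAL, Bytes.toBytes(value)));
      scan.setFilter(allFilters);
      return scan;
    }
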
diff --git a/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java
index d67d120a35a..46d3a72a477 100644
--- a/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java
+++ b/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java
@@ -25,10 +25,8 @@ import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
-import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.HbaseObjectWritable;
-import org.apache.hadoop.io.ObjectWritable;
/**
* This is a generic filter to be used to filter by comparison. It takes an
diff --git a/src/java/org/apache/hadoop/hbase/filter/FilterList.java b/src/java/org/apache/hadoop/hbase/filter/FilterList.java
index 2a4439b75c1..b09cfa576dc 100644
--- a/src/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/src/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -33,13 +33,14 @@ import org.apache.hadoop.io.Writable;
/**
* Implementation of {@link Filter} that represents an ordered List of Filters
- * which will be evaluated with a specified boolean operator MUST_PASS_ALL
- * (!AND) or MUST_PASS_ONE (!OR). Since you can use Filter Lists as children
- * of Filter Lists, you can create a hierarchy of filters to be evaluated.
+ * which will be evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL}
+ * (!AND) or {@link Operator#MUST_PASS_ONE} (!OR).
+ * Since you can use Filter Lists as children of Filter Lists, you can create a
+ * hierarchy of filters to be evaluated.
+ * Defaults to {@link Operator#MUST_PASS_ALL}.
*
 * TODO: Fix creation of Configuration on serialization and deserialization.
 */
public class FilterList implements Filter {
-
  /** set operator */
  public static enum Operator {
    /** !AND */
@@ -69,6 +70,15 @@ public class FilterList implements Filter {
    this.filters = rowFilters;
  }

+  /**
+   * Constructor that takes an operator.
+   *
+   * @param operator Operator to process filter set with.
+   */
+  public FilterList(final Operator operator) {
+    this.operator = operator;
+  }
+
  /**
   * Constructor that takes a set of {@link Filter}s and an operator.
   *
@@ -115,19 +125,19 @@ public class FilterList implements Filter {

  public boolean filterRowKey(byte[] rowKey, int offset, int length) {
    for (Filter filter : filters) {
-      if (operator == Operator.MUST_PASS_ALL) {
-        if (filter.filterAllRemaining()
-            || filter.filterRowKey(rowKey, offset, length)) {
+      if (this.operator == Operator.MUST_PASS_ALL) {
+        if (filter.filterAllRemaining() ||
+            filter.filterRowKey(rowKey, offset, length)) {
          return true;
        }
-      } else if (operator == Operator.MUST_PASS_ONE) {
-        if (!filter.filterAllRemaining()
-            && !filter.filterRowKey(rowKey, offset, length)) {
+      } else if (this.operator == Operator.MUST_PASS_ONE) {
+        if (!filter.filterAllRemaining() &&
+            !filter.filterRowKey(rowKey, offset, length)) {
          return false;
        }
      }
    }
-    return operator == Operator.MUST_PASS_ONE;
+    return this.operator == Operator.MUST_PASS_ONE;
  }

  public boolean filterAllRemaining() {
@@ -179,8 +189,7 @@ public class FilterList implements Filter {
  public boolean filterRow() {
    for (Filter filter : filters) {
      if (operator == Operator.MUST_PASS_ALL) {
-        if (filter.filterAllRemaining()
-            || filter.filterRow()) {
+        if (filter.filterAllRemaining() || filter.filterRow()) {
          return true;
        }
      } else if (operator == Operator.MUST_PASS_ONE) {
diff --git a/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index 15902c14968..103786bdebf 100644
--- a/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ b/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -108,16 +108,19 @@ public class SingleColumnValueFilter implements Filter {
    this.compareOp = compareOp;
    this.comparator = comparator;
  }
-
+
  public boolean filterRowKey(byte[] rowKey, int offset, int length) {
+    // We don't filter on the row key... we filter later on column value so
+    // always return false.
    return false;
  }

  public ReturnCode filterKeyValue(KeyValue keyValue) {
-    if(matchedColumn) {
+    // System.out.println("REMOVE KEY=" + keyValue.toString() + ", value=" + Bytes.toString(keyValue.getValue()));
+    if (this.matchedColumn) {
      // We already found and matched the single column, all keys now pass
      return ReturnCode.INCLUDE;
-    } else if(foundColumn) {
+    } else if (this.foundColumn) {
      // We found but did not match the single column, skip to next row
      return ReturnCode.NEXT_ROW;
    }
@@ -129,16 +132,18 @@ public class SingleColumnValueFilter implements Filter {
        keyValue.getValueOffset(), keyValue.getValueLength())) {
      return ReturnCode.NEXT_ROW;
    }
-    matchedColumn = true;
+    this.matchedColumn = true;
    return ReturnCode.INCLUDE;
  }

-  private boolean filterColumnValue(final byte[] data, final int offset,
+  private boolean filterColumnValue(final byte [] data, final int offset,
      final int length) {
-    int compareResult = comparator.compareTo(Arrays.copyOfRange(data, offset,
-        offset + length));
-
-    switch (compareOp) {
+    // TODO: Can this filter take a rawcomparator so don't have to make this
+    // byte array copy?
+    int compareResult =
+      this.comparator.compareTo(Arrays.copyOfRange(data, offset, offset + length));
+    LOG.debug("compareResult=" + compareResult + " " + Bytes.toString(data, offset, length));
+    switch (this.compareOp) {
    case LESS:
      return compareResult <= 0;
    case LESS_OR_EQUAL:
@@ -163,23 +168,23 @@ public class SingleColumnValueFilter implements Filter {
  public boolean filterRow() {
    // If column was found, return false if it was matched, true if it was not
    // If column not found, return true if we filter if missing, false if not
-    return foundColumn ? !matchedColumn : filterIfMissing;
+    return this.foundColumn? !this.matchedColumn: this.filterIfMissing;
  }

  public void reset() {
    foundColumn = false;
    matchedColumn = false;
  }
-
+
  /**
   * Get whether entire row should be filtered if column is not found.
-   * @return filterIfMissing true if row should be skipped if column not found,
-   * false if row should be let through anyways
+   * @return true if row should be skipped if column not found, false if row
+   * should be let through anyways
   */
  public boolean getFilterIfMissing() {
    return filterIfMissing;
  }
-
+
  /**
   * Set whether entire row should be filtered if column is not found.
   *
@@ -200,12 +205,12 @@ public class SingleColumnValueFilter implements Filter {
if(this.columnQualifier.length == 0) {
this.columnQualifier = null;
}
- compareOp = CompareOp.valueOf(in.readUTF());
- comparator = (WritableByteArrayComparable) HbaseObjectWritable.readObject(in,
- null);
- foundColumn = in.readBoolean();
- matchedColumn = in.readBoolean();
- filterIfMissing = in.readBoolean();
+ this.compareOp = CompareOp.valueOf(in.readUTF());
+ this.comparator =
+ (WritableByteArrayComparable)HbaseObjectWritable.readObject(in, null);
+ this.foundColumn = in.readBoolean();
+ this.matchedColumn = in.readBoolean();
+ this.filterIfMissing = in.readBoolean();
}
public void write(final DataOutput out) throws IOException {
@@ -218,4 +223,4 @@ public class SingleColumnValueFilter implements Filter {
out.writeBoolean(matchedColumn);
out.writeBoolean(filterIfMissing);
}
-}
+}
\ No newline at end of file
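
SingleColumnValueFilter is stateful per row: reset() clears foundColumn and
matchedColumn, filterKeyValue() updates them as the row's cells stream past, and
filterRow() gives the final verdict, honoring filterIfMissing when the column
never appeared. A rough sketch of how a caller drives those callbacks for one
row -- an illustration of the contract, not the actual scanner code:

    // Sketch only, assuming the Filter interface shown in this patch.
    List<KeyValue> applyToRow(final Filter filter, final List<KeyValue> row) {
      List<KeyValue> results = new ArrayList<KeyValue>();
      filter.reset();                          // per-row state cleared here
      for (KeyValue kv : row) {
        Filter.ReturnCode rc = filter.filterKeyValue(kv);
        if (rc == Filter.ReturnCode.INCLUDE) {
          results.add(kv);
        } else if (rc == Filter.ReturnCode.NEXT_ROW) {
          break;                               // skip the rest of this row
        }                                      // other codes omitted here
      }
      if (filter.filterRow()) {                // e.g. filterIfMissing, no match
        results.clear();
      }
      return results;
    }
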
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 74d3a532cad..470e96c4d6b 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1179,8 +1179,6 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
regionInfo.getTableDesc().getName(), kvs,
(regionInfo.isMetaRegion() || regionInfo.isRootRegion()), now);
}
-
-
flush = isFlushSize(size);
} finally {
this.updatesLock.readLock().unlock();
@@ -1744,57 +1742,58 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
* @throws IOException
*/
private boolean nextInternal() throws IOException {
- // This method should probably be reorganized a bit... has gotten messy
- KeyValue kv;
- byte[] currentRow = null;
+ byte [] currentRow = null;
boolean filterCurrentRow = false;
while (true) {
- kv = this.storeHeap.peek();
- if (kv == null) {
- return false;
- }
+ KeyValue kv = this.storeHeap.peek();
+ if (kv == null) return false;
byte [] row = kv.getRow();
- if (filterCurrentRow && Bytes.equals(currentRow, row)) {
- // filter all columns until row changes
- this.storeHeap.next(results);
- results.clear();
+ boolean samerow = Bytes.equals(currentRow, row);
+ if (samerow && filterCurrentRow) {
+ // Filter all columns until row changes
+ this.storeHeap.next(this.results);
+ this.results.clear();
continue;
}
- // see if current row should be filtered based on row key
- if (filter != null && filter.filterRowKey(row, 0, row.length)) {
- if(!results.isEmpty() && !Bytes.equals(currentRow, row)) {
- return true;
- }
- this.storeHeap.next(results);
- results.clear();
- resetFilters();
- filterCurrentRow = true;
- currentRow = row;
- continue;
- }
- if(!Bytes.equals(currentRow, row)) {
+ if (!samerow) {
// Continue on the next row:
currentRow = row;
filterCurrentRow = false;
// See if we passed stopRow
- if(stopRow != null &&
- comparator.compareRows(stopRow, 0, stopRow.length,
- currentRow, 0, currentRow.length) <= 0) {
+ if (this.stopRow != null &&
+ comparator.compareRows(this.stopRow, 0, this.stopRow.length,
+ currentRow, 0, currentRow.length) <= 0) {
return false;
}
- // if there are _no_ results or current row should be filtered
- if (results.isEmpty() || filter != null && filter.filterRow()) {
- // make sure results is empty
- results.clear();
- resetFilters();
- continue;
- }
- return true;
+ if (hasResults()) return true;
+ }
+ // See if current row should be filtered based on row key
+ if (this.filter != null && this.filter.filterRowKey(row, 0, row.length)) {
+ resetFilters();
+ filterCurrentRow = true;
+ currentRow = row;
}
this.storeHeap.next(results);
}
}
+ /*
+   * Do we have results to return, or should we continue scanning? Call when
+   * we get to the end of a row. Does housekeeping -- clearing results and
+   * resetting filters -- if we are to continue.
+   * @return True if we should return results, false if we must keep going.
+ */
+ private boolean hasResults() {
+ if (this.results.isEmpty() ||
+ this.filter != null && this.filter.filterRow()) {
+ // Make sure results is empty, reset filters
+ results.clear();
+ resetFilters();
+ return false;
+ }
+ return true;
+ }
+
public void close() {
storeHeap.close();
}
@@ -2326,7 +2325,6 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
store.get(get, qualifiers, results);
if (!results.isEmpty()) {
- byte [] oldValue = results.get(0).getValue();
KeyValue kv = results.get(0);
byte [] buffer = kv.getBuffer();
int valueOffset = kv.getValueOffset();
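
The nextInternal() rework above reorders the checks: the row-boundary test now
runs before the row-key filter, so a completed row's accumulated results are
returned (via the new hasResults() helper) before the next row's key is judged;
previously the row-key filtering path could clear results that belonged to the
prior row. A condensed restatement of the new control flow -- names follow the
patch, but this is an illustrative paraphrase, and passedStopRow() is a
stand-in for the inline stopRow comparison:

    while (true) {
      KeyValue kv = storeHeap.peek();
      if (kv == null) return false;                   // scanner exhausted
      byte [] row = kv.getRow();
      boolean samerow = Bytes.equals(currentRow, row);
      if (samerow && filterCurrentRow) {              // drain a filtered row
        storeHeap.next(results);
        results.clear();
        continue;
      }
      if (!samerow) {                                 // crossed a row boundary
        currentRow = row;
        filterCurrentRow = false;
        if (passedStopRow(currentRow)) return false;  // stopRow check, as above
        if (hasResults()) return true;                // emit the finished row
      }
      if (filter != null && filter.filterRowKey(row, 0, row.length)) {
        resetFilters();                               // clear per-row filter state
        filterCurrentRow = true;
        currentRow = row;
      }
      storeHeap.next(results);                        // keep accumulating cells
    }
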
diff --git a/src/test/org/apache/hadoop/hbase/HBaseTestCase.java b/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
index a8f2b4f7585..cdb49506a71 100644
--- a/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -219,8 +219,7 @@ public abstract class HBaseTestCase extends TestCase {
if (startKeyBytes == null || startKeyBytes.length == 0) {
startKeyBytes = START_KEY_BYTES;
}
- return addContent(new HRegionIncommon(r), Bytes.toString(columnFamily),
- null,
+ return addContent(new HRegionIncommon(r), Bytes.toString(columnFamily), null,
startKeyBytes, endKey, -1);
}
diff --git a/src/test/org/apache/hadoop/hbase/client/TestClient.java b/src/test/org/apache/hadoop/hbase/client/TestClient.java
index 0160e719fed..87332b7562d 100644
--- a/src/test/org/apache/hadoop/hbase/client/TestClient.java
+++ b/src/test/org/apache/hadoop/hbase/client/TestClient.java
@@ -20,10 +20,6 @@
package org.apache.hadoop.hbase.client;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Map;
-
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseClusterTestCase;
@@ -35,14 +31,23 @@ import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.QualifierFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.util.Bytes;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.UUID;
+
/**
* Tests from client-side of a cluster.
*/
@@ -62,6 +67,135 @@ public class TestClient extends HBaseClusterTestCase {
super();
}
+ /**
+ * Test from client side of an involved filter against a multi family that
+ * involves deletes.
+ *
+ * @throws Exception
+ */
+ public void testWeirdCacheBehaviour() throws Exception {
+ byte[] TABLE = Bytes.toBytes("testWeirdCacheBehaviour");
+ byte[][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"),
+ Bytes.toBytes("trans-type"), Bytes.toBytes("trans-date"),
+ Bytes.toBytes("trans-tags"), Bytes.toBytes("trans-group") };
+ HTable ht = createTable(TABLE, FAMILIES);
+ String value = "this is the value";
+ String value2 = "this is some other value";
+ String keyPrefix1 = UUID.randomUUID().toString();
+ String keyPrefix2 = UUID.randomUUID().toString();
+ String keyPrefix3 = UUID.randomUUID().toString();
+ putRows(ht, 3, value, keyPrefix1);
+ putRows(ht, 3, value, keyPrefix2);
+ putRows(ht, 3, value, keyPrefix3);
+ ht.flushCommits();
+ putRows(ht, 3, value2, keyPrefix1);
+ putRows(ht, 3, value2, keyPrefix2);
+ putRows(ht, 3, value2, keyPrefix3);
+ HTable table = new HTable(conf, Bytes.toBytes("testWeirdCacheBehaviour"));
+ System.out.println("Checking values for key: " + keyPrefix1);
+ assertEquals("Got back incorrect number of rows from scan", 3,
+ getNumberOfRows(keyPrefix1, value2, table));
+ System.out.println("Checking values for key: " + keyPrefix2);
+ assertEquals("Got back incorrect number of rows from scan", 3,
+ getNumberOfRows(keyPrefix2, value2, table));
+ System.out.println("Checking values for key: " + keyPrefix3);
+ assertEquals("Got back incorrect number of rows from scan", 3,
+ getNumberOfRows(keyPrefix3, value2, table));
+ deleteColumns(ht, value2, keyPrefix1);
+ deleteColumns(ht, value2, keyPrefix2);
+ deleteColumns(ht, value2, keyPrefix3);
+ System.out.println("Starting important checks.....");
+ assertEquals("Got back incorrect number of rows from scan: " + keyPrefix1,
+ 0, getNumberOfRows(keyPrefix1, value2, table));
+ assertEquals("Got back incorrect number of rows from scan: " + keyPrefix2,
+ 0, getNumberOfRows(keyPrefix2, value2, table));
+ assertEquals("Got back incorrect number of rows from scan: " + keyPrefix3,
+ 0, getNumberOfRows(keyPrefix3, value2, table));
+ ht.setScannerCaching(0);
+ assertEquals("Got back incorrect number of rows from scan", 0,
+        getNumberOfRows(keyPrefix1, value2, table));
+    ht.setScannerCaching(100);
+ assertEquals("Got back incorrect number of rows from scan", 0,
+ getNumberOfRows(keyPrefix2, value2, table));
+ }
+
+ private void deleteColumns(HTable ht, String value, String keyPrefix)
+ throws IOException {
+ ResultScanner scanner = buildScanner(keyPrefix, value, ht);
+ Iterator