diff --git a/CHANGES.txt b/CHANGES.txt
index 5649395bc74..940a2602384 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -65,6 +65,8 @@ Release 0.21.0 - Unreleased
               work
    HBASE-1889 ClassNotFoundException on trunk for REST
    HBASE-1905 Remove unused config. hbase.hstore.blockCache.blockSize
+   HBASE-1906 FilterList of prefix and columnvalue not working properly with
+              deletes and multiple values
 
  IMPROVEMENTS
    HBASE-1760 Cleanup TODOs in HTable
diff --git a/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java
index d67d120a35a..46d3a72a477 100644
--- a/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java
+++ b/src/java/org/apache/hadoop/hbase/filter/CompareFilter.java
@@ -25,10 +25,8 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.Arrays;
 
-import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.io.HbaseObjectWritable;
-import org.apache.hadoop.io.ObjectWritable;
 
 /**
  * This is a generic filter to be used to filter by comparison. It takes an
diff --git a/src/java/org/apache/hadoop/hbase/filter/FilterList.java b/src/java/org/apache/hadoop/hbase/filter/FilterList.java
index 2a4439b75c1..b09cfa576dc 100644
--- a/src/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/src/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -33,13 +33,14 @@ import org.apache.hadoop.io.Writable;
 /**
  * Implementation of {@link Filter} that represents an ordered List of Filters
- * which will be evaluated with a specified boolean operator MUST_PASS_ALL
- * (!AND) or MUST_PASS_ONE (!OR). Since you can use Filter Lists as children
- * of Filter Lists, you can create a hierarchy of filters to be evaluated.
+ * which will be evaluated with a specified boolean operator {@link Operator#MUST_PASS_ALL}
+ * (!AND) or {@link Operator#MUST_PASS_ONE} (!OR).
+ * Since you can use Filter Lists as children of Filter Lists, you can create a
+ * hierarchy of filters to be evaluated.
+ * Defaults to {@link Operator#MUST_PASS_ALL}.
  * <p>
  * TODO: Fix creation of Configuration on serialization and deserialization.
  */
 public class FilterList implements Filter {
-
   /** set operator */
   public static enum Operator {
     /** !AND */
@@ -69,6 +70,15 @@ public class FilterList implements Filter {
     this.filters = rowFilters;
   }
 
+  /**
+   * Constructor that takes an operator.
+   *
+   * @param operator Operator to process filter set with.
+   */
+  public FilterList(final Operator operator) {
+    this.operator = operator;
+  }
+
   /**
    * Constructor that takes a set of {@link Filter}s and an operator.
    *
@@ -115,19 +125,19 @@ public class FilterList implements Filter {
 
   public boolean filterRowKey(byte[] rowKey, int offset, int length) {
     for (Filter filter : filters) {
-      if (operator == Operator.MUST_PASS_ALL) {
-        if (filter.filterAllRemaining()
-            || filter.filterRowKey(rowKey, offset, length)) {
+      if (this.operator == Operator.MUST_PASS_ALL) {
+        if (filter.filterAllRemaining() ||
+            filter.filterRowKey(rowKey, offset, length)) {
           return true;
         }
-      } else if (operator == Operator.MUST_PASS_ONE) {
-        if (!filter.filterAllRemaining()
-            && !filter.filterRowKey(rowKey, offset, length)) {
+      } else if (this.operator == Operator.MUST_PASS_ONE) {
+        if (!filter.filterAllRemaining() &&
+            !filter.filterRowKey(rowKey, offset, length)) {
           return false;
         }
       }
     }
-    return operator == Operator.MUST_PASS_ONE;
+    return this.operator == Operator.MUST_PASS_ONE;
   }
 
   public boolean filterAllRemaining() {
@@ -179,8 +189,7 @@ public class FilterList implements Filter {
   public boolean filterRow() {
     for (Filter filter : filters) {
       if (operator == Operator.MUST_PASS_ALL) {
-        if (filter.filterAllRemaining()
-            || filter.filterRow()) {
+        if (filter.filterAllRemaining() || filter.filterRow()) {
           return true;
         }
       } else if (operator == Operator.MUST_PASS_ONE) {
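The FilterList javadoc above is worth a concrete illustration. Because a FilterList is itself a Filter, a MUST_PASS_ONE (OR) list can be nested inside a default MUST_PASS_ALL (AND) list, and the new FilterList(Operator) constructor lets callers name the operator up front and add filters one at a time. A minimal sketch using only APIs touched by this patch; the family, qualifier, and values are illustrative, and a Scan named scan is assumed to be in scope:

    // (prefix AND (col == "a" OR col == "b"))
    FilterList orList = new FilterList(FilterList.Operator.MUST_PASS_ONE);
    orList.addFilter(new SingleColumnValueFilter(Bytes.toBytes("fam"),
      Bytes.toBytes("qual"), CompareOp.EQUAL, Bytes.toBytes("a")));
    orList.addFilter(new SingleColumnValueFilter(Bytes.toBytes("fam"),
      Bytes.toBytes("qual"), CompareOp.EQUAL, Bytes.toBytes("b")));
    FilterList andList = new FilterList();  // Defaults to MUST_PASS_ALL.
    andList.addFilter(new PrefixFilter(Bytes.toBytes("myprefix")));
    andList.addFilter(orList);
    scan.setFilter(andList);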
diff --git a/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index 15902c14968..103786bdebf 100644
--- a/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ b/src/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -108,16 +108,19 @@ public class SingleColumnValueFilter implements Filter {
     this.compareOp = compareOp;
     this.comparator = comparator;
   }
-  
+
   public boolean filterRowKey(byte[] rowKey, int offset, int length) {
+    // We don't filter on the row key... we filter later on column value so
+    // always return false.
     return false;
   }
 
   public ReturnCode filterKeyValue(KeyValue keyValue) {
-    if(matchedColumn) {
+    // System.out.println("REMOVE KEY=" + keyValue.toString() + ", value=" + Bytes.toString(keyValue.getValue()));
+    if (this.matchedColumn) {
       // We already found and matched the single column, all keys now pass
       return ReturnCode.INCLUDE;
-    } else if(foundColumn) {
+    } else if (this.foundColumn) {
       // We found but did not match the single column, skip to next row
       return ReturnCode.NEXT_ROW;
     }
@@ -129,16 +132,18 @@ public class SingleColumnValueFilter implements Filter {
         keyValue.getValueOffset(), keyValue.getValueLength())) {
       return ReturnCode.NEXT_ROW;
     }
-    matchedColumn = true;
+    this.matchedColumn = true;
     return ReturnCode.INCLUDE;
   }
 
-  private boolean filterColumnValue(final byte[] data, final int offset,
+  private boolean filterColumnValue(final byte [] data, final int offset,
       final int length) {
-    int compareResult = comparator.compareTo(Arrays.copyOfRange(data, offset,
-        offset + length));
-
-    switch (compareOp) {
+    // TODO: Can this filter take a rawcomparator so don't have to make this
+    // byte array copy?
+    int compareResult =
+      this.comparator.compareTo(Arrays.copyOfRange(data, offset, offset + length));
+    LOG.debug("compareResult=" + compareResult + " " + Bytes.toString(data, offset, length));
+    switch (this.compareOp) {
     case LESS:
       return compareResult <= 0;
     case LESS_OR_EQUAL:
@@ -163,23 +168,23 @@ public class SingleColumnValueFilter implements Filter {
   public boolean filterRow() {
     // If column was found, return false if it was matched, true if it was not
     // If column not found, return true if we filter if missing, false if not
-    return foundColumn ? !matchedColumn : filterIfMissing;
+    return this.foundColumn? !this.matchedColumn: this.filterIfMissing;
   }
 
   public void reset() {
     foundColumn = false;
     matchedColumn = false;
   }
-  
+
   /**
    * Get whether entire row should be filtered if column is not found.
-   * @return filterIfMissing true if row should be skipped if column not found,
-   * false if row should be let through anyways
+   * @return true if row should be skipped if column not found, false if row
+   * should be let through anyway
    */
   public boolean getFilterIfMissing() {
     return filterIfMissing;
   }
-  
+
   /**
    * Set whether entire row should be filtered if column is not found.
    * <p>
@@ -200,12 +205,12 @@ public class SingleColumnValueFilter implements Filter {
     if(this.columnQualifier.length == 0) {
       this.columnQualifier = null;
     }
-    compareOp = CompareOp.valueOf(in.readUTF());
-    comparator = (WritableByteArrayComparable) HbaseObjectWritable.readObject(in,
-        null);
-    foundColumn = in.readBoolean();
-    matchedColumn = in.readBoolean();
-    filterIfMissing = in.readBoolean();
+    this.compareOp = CompareOp.valueOf(in.readUTF());
+    this.comparator =
+      (WritableByteArrayComparable)HbaseObjectWritable.readObject(in, null);
+    this.foundColumn = in.readBoolean();
+    this.matchedColumn = in.readBoolean();
+    this.filterIfMissing = in.readBoolean();
   }
 
   public void write(final DataOutput out) throws IOException {
@@ -218,4 +223,4 @@ public class SingleColumnValueFilter implements Filter {
     out.writeBoolean(matchedColumn);
     out.writeBoolean(filterIfMissing);
   }
-}
+}
\ No newline at end of file
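The setFilterIfMissing/getFilterIfMissing wording fixed above matters for the bug being chased: by default, a row that lacks the tested column passes the filter anyway. The new tests rely on the opt-in behavior; a minimal sketch reusing the test's family and qualifier names, with scan assumed in scope:

    SingleColumnValueFilter f = new SingleColumnValueFilter(
      Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"),
      CompareOp.EQUAL, Bytes.toBytes("this is some other value"));
    // Default (false): rows with no trans-tags:qual2 cell are let through.
    // Opt in to drop such rows entirely:
    f.setFilterIfMissing(true);
    scan.setFilter(f);

With filterIfMissing set, deleting trans-tags:qual2 from a row should remove the whole row from the scan's results, which is exactly what the tests below assert after their deletes.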
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 74d3a532cad..470e96c4d6b 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -1179,8 +1179,6 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
           regionInfo.getTableDesc().getName(), kvs,
           (regionInfo.isMetaRegion() || regionInfo.isRootRegion()), now);
       }
-
-      flush = isFlushSize(size);
     } finally {
       this.updatesLock.readLock().unlock();
     }
@@ -1744,57 +1742,58 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
      * @throws IOException
      */
     private boolean nextInternal() throws IOException {
-      // This method should probably be reorganized a bit... has gotten messy
-      KeyValue kv;
-      byte[] currentRow = null;
+      byte [] currentRow = null;
       boolean filterCurrentRow = false;
       while (true) {
-        kv = this.storeHeap.peek();
-        if (kv == null) {
-          return false;
-        }
+        KeyValue kv = this.storeHeap.peek();
+        if (kv == null) return false;
         byte [] row = kv.getRow();
-        if (filterCurrentRow && Bytes.equals(currentRow, row)) {
-          // filter all columns until row changes
-          this.storeHeap.next(results);
-          results.clear();
+        boolean samerow = Bytes.equals(currentRow, row);
+        if (samerow && filterCurrentRow) {
+          // Filter all columns until row changes
+          this.storeHeap.next(this.results);
+          this.results.clear();
           continue;
         }
-        // see if current row should be filtered based on row key
-        if (filter != null && filter.filterRowKey(row, 0, row.length)) {
-          if(!results.isEmpty() && !Bytes.equals(currentRow, row)) {
-            return true;
-          }
-          this.storeHeap.next(results);
-          results.clear();
-          resetFilters();
-          filterCurrentRow = true;
-          currentRow = row;
-          continue;
-        }
-        if(!Bytes.equals(currentRow, row)) {
+        if (!samerow) {
           // Continue on the next row:
           currentRow = row;
           filterCurrentRow = false;
           // See if we passed stopRow
-          if(stopRow != null &&
-            comparator.compareRows(stopRow, 0, stopRow.length,
-              currentRow, 0, currentRow.length) <= 0) {
+          if (this.stopRow != null &&
+              comparator.compareRows(this.stopRow, 0, this.stopRow.length,
+                currentRow, 0, currentRow.length) <= 0) {
             return false;
           }
-          // if there are _no_ results or current row should be filtered
-          if (results.isEmpty() || filter != null && filter.filterRow()) {
-            // make sure results is empty
-            results.clear();
-            resetFilters();
-            continue;
-          }
-          return true;
+          if (hasResults()) return true;
+        }
+        // See if current row should be filtered based on row key
+        if (this.filter != null && this.filter.filterRowKey(row, 0, row.length)) {
+          resetFilters();
+          filterCurrentRow = true;
+          currentRow = row;
         }
         this.storeHeap.next(results);
       }
     }
 
+    /*
+     * Do we have results to return, or should we continue?  Call when we get
+     * to the end of a row.  Does house cleaning -- clearing results and
+     * resetting filters -- if we are to continue.
+     * @return True if we should return, else false if we need to keep going.
+     */
+    private boolean hasResults() {
+      if (this.results.isEmpty() ||
+          this.filter != null && this.filter.filterRow()) {
+        // Make sure results is empty, reset filters
+        results.clear();
+        resetFilters();
+        return false;
+      }
+      return true;
+    }
+
     public void close() {
       storeHeap.close();
     }
@@ -2326,7 +2325,6 @@ public class HRegion implements HConstants, HeapSize { // , Writable{
       store.get(get, qualifiers, results);
 
       if (!results.isEmpty()) {
-        byte [] oldValue = results.get(0).getValue();
         KeyValue kv = results.get(0);
         byte [] buffer = kv.getBuffer();
         int valueOffset = kv.getValueOffset();
diff --git a/src/test/org/apache/hadoop/hbase/HBaseTestCase.java b/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
index a8f2b4f7585..cdb49506a71 100644
--- a/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
+++ b/src/test/org/apache/hadoop/hbase/HBaseTestCase.java
@@ -219,8 +219,7 @@ public abstract class HBaseTestCase extends TestCase {
     if (startKeyBytes == null || startKeyBytes.length == 0) {
       startKeyBytes = START_KEY_BYTES;
     }
-    return addContent(new HRegionIncommon(r), Bytes.toString(columnFamily),
-      null,
+    return addContent(new HRegionIncommon(r), Bytes.toString(columnFamily), null,
       startKeyBytes, endKey, -1);
   }
 
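The HRegion.nextInternal() rewrite above is the heart of the fix. The old loop consulted Filter.filterRowKey() before it had decided whether to hand back the results already accumulated for the just-finished row, so with deletes and multiple versions in play a row could be dropped or emitted at the wrong moment. The new shape settles the row boundary first -- emitting via the extracted hasResults() helper, which also clears state -- and only then lets the row-key filter veto the new row. A simplified paraphrase, not the actual method; skipRow() and passedStopRow() are hypothetical stand-ins for logic the real code inlines:

    while (true) {
      KeyValue kv = this.storeHeap.peek();
      if (kv == null) return false;            // Scanner exhausted.
      byte [] row = kv.getRow();
      boolean samerow = Bytes.equals(currentRow, row);
      if (samerow && filterCurrentRow) {       // Row already vetoed:
        skipRow();                             // consume and discard it.
        continue;
      }
      if (!samerow) {                          // Row boundary reached.
        currentRow = row;
        filterCurrentRow = false;
        if (passedStopRow(row)) return false;
        if (hasResults()) return true;         // Emit the finished row.
      }
      if (this.filter != null &&
          this.filter.filterRowKey(row, 0, row.length)) {
        resetFilters();
        filterCurrentRow = true;               // Veto the rest of this row.
        currentRow = row;
      }
      this.storeHeap.next(results);            // Accumulate this KeyValue.
    }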
diff --git a/src/test/org/apache/hadoop/hbase/client/TestClient.java b/src/test/org/apache/hadoop/hbase/client/TestClient.java
index 0160e719fed..87332b7562d 100644
--- a/src/test/org/apache/hadoop/hbase/client/TestClient.java
+++ b/src/test/org/apache/hadoop/hbase/client/TestClient.java
@@ -20,10 +20,6 @@
 
 package org.apache.hadoop.hbase.client;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Map;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HBaseClusterTestCase;
@@ -35,14 +31,23 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.filter.QualifierFilter;
 import org.apache.hadoop.hbase.filter.RegexStringComparator;
 import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchFilter;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.UUID;
+
 /**
  * Tests from client-side of a cluster.
  */
@@ -62,6 +67,135 @@ public class TestClient extends HBaseClusterTestCase {
     super();
   }
 
+  /**
+   * Test from client side of an involved filter against a multi-family
+   * table that involves deletes.
+   *
+   * @throws Exception
+   */
+  public void testWeirdCacheBehaviour() throws Exception {
+    byte[] TABLE = Bytes.toBytes("testWeirdCacheBehaviour");
+    byte[][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"),
+        Bytes.toBytes("trans-type"), Bytes.toBytes("trans-date"),
+        Bytes.toBytes("trans-tags"), Bytes.toBytes("trans-group") };
+    HTable ht = createTable(TABLE, FAMILIES);
+    String value = "this is the value";
+    String value2 = "this is some other value";
+    String keyPrefix1 = UUID.randomUUID().toString();
+    String keyPrefix2 = UUID.randomUUID().toString();
+    String keyPrefix3 = UUID.randomUUID().toString();
+    putRows(ht, 3, value, keyPrefix1);
+    putRows(ht, 3, value, keyPrefix2);
+    putRows(ht, 3, value, keyPrefix3);
+    ht.flushCommits();
+    putRows(ht, 3, value2, keyPrefix1);
+    putRows(ht, 3, value2, keyPrefix2);
+    putRows(ht, 3, value2, keyPrefix3);
+    HTable table = new HTable(conf, Bytes.toBytes("testWeirdCacheBehaviour"));
+    System.out.println("Checking values for key: " + keyPrefix1);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix1, value2, table));
+    System.out.println("Checking values for key: " + keyPrefix2);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix2, value2, table));
+    System.out.println("Checking values for key: " + keyPrefix3);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix3, value2, table));
+    deleteColumns(ht, value2, keyPrefix1);
+    deleteColumns(ht, value2, keyPrefix2);
+    deleteColumns(ht, value2, keyPrefix3);
+    System.out.println("Starting important checks.....");
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix1,
+        0, getNumberOfRows(keyPrefix1, value2, table));
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix2,
+        0, getNumberOfRows(keyPrefix2, value2, table));
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix3,
+        0, getNumberOfRows(keyPrefix3, value2, table));
+    ht.setScannerCaching(0);
+    assertEquals("Got back incorrect number of rows from scan", 0,
+        getNumberOfRows(keyPrefix1, value2, table));
+    ht.setScannerCaching(100);
+    assertEquals("Got back incorrect number of rows from scan", 0,
+        getNumberOfRows(keyPrefix2, value2, table));
+  }
+
+  private void deleteColumns(HTable ht, String value, String keyPrefix)
+      throws IOException {
+    ResultScanner scanner = buildScanner(keyPrefix, value, ht);
+    Iterator<Result> it = scanner.iterator();
+    int count = 0;
+    while (it.hasNext()) {
+      Result result = it.next();
+      Delete delete = new Delete(result.getRow());
+      delete.deleteColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
+      ht.delete(delete);
+      count++;
+    }
+    assertEquals("Did not perform correct number of deletes", 3, count);
+  }
+
+  private int getNumberOfRows(String keyPrefix, String value, HTable ht)
+      throws Exception {
+    ResultScanner resultScanner = buildScanner(keyPrefix, value, ht);
+    Iterator<Result> scanner = resultScanner.iterator();
+    int numberOfResults = 0;
+    while (scanner.hasNext()) {
+      Result result = scanner.next();
+      System.out.println("Got back key: " + Bytes.toString(result.getRow()));
+      for (KeyValue kv : result.raw()) {
+        System.out.println("kv=" + kv.toString() + ", "
+            + Bytes.toString(kv.getValue()));
+      }
+      numberOfResults++;
+    }
+    return numberOfResults;
+  }
+
+  private ResultScanner buildScanner(String keyPrefix, String value, HTable ht)
+      throws IOException {
+    // OurFilterList allFilters = new OurFilterList();
+    FilterList allFilters = new FilterList(/* FilterList.Operator.MUST_PASS_ALL */);
+    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
+    SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes
+        .toBytes("trans-tags"), Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes
+        .toBytes(value));
+    filter.setFilterIfMissing(true);
+    allFilters.addFilter(filter);
+
+    // allFilters.addFilter(new
+    // RowExcludingSingleColumnValueFilter(Bytes.toBytes("trans-tags"),
+    // Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value)));
+
+    Scan scan = new Scan();
+    scan.addFamily(Bytes.toBytes("trans-blob"));
+    scan.addFamily(Bytes.toBytes("trans-type"));
+    scan.addFamily(Bytes.toBytes("trans-date"));
+    scan.addFamily(Bytes.toBytes("trans-tags"));
+    scan.addFamily(Bytes.toBytes("trans-group"));
+    scan.setFilter(allFilters);
+
+    return ht.getScanner(scan);
+  }
+
+  private void putRows(HTable ht, int numRows, String value, String key)
+      throws IOException {
+    for (int i = 0; i < numRows; i++) {
+      String row = key + "_" + UUID.randomUUID().toString();
+      System.out.println(String.format("Saving row: %s, with value %s", row,
+          value));
+      Put put = new Put(Bytes.toBytes(row));
+      put.add(Bytes.toBytes("trans-blob"), null, Bytes
+          .toBytes("value for blob"));
+      put.add(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
+      put.add(Bytes.toBytes("trans-date"), null, Bytes
+          .toBytes("20090921010101999"));
+      put.add(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), Bytes
+          .toBytes(value));
+      put.add(Bytes.toBytes("trans-group"), null, Bytes
+          .toBytes("adhocTransactionGroupId"));
+      ht.put(put);
+    }
+  }
+
   /**
    * Test filters when multiple regions.  It does counts.  Needs eye-balling of
    * logs to ensure that we're not scanning more regions that we're supposed to.
@@ -249,6 +383,7 @@ public class TestClient extends HBaseClusterTestCase {
     scanner.close();
     System.out.println("Done.");
   }
+
   public void testFilters() throws Exception {
     byte [] TABLE = Bytes.toBytes("testFilters");
     HTable ht = createTable(TABLE, FAMILY);
@@ -2671,7 +2806,7 @@ public class TestClient extends HBaseClusterTestCase {
   }
 
   private byte [][] makeN(byte [] base, int n) {
-    if(n > 256) {
+    if (n > 256) {
       return makeNBig(base, n);
     }
     byte [][] ret = new byte[n][];
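Note the setScannerCaching(0) and setScannerCaching(100) calls near the end of testWeirdCacheBehaviour: the test re-runs the same post-delete scan under different client-side read-ahead settings, since the misbehavior this patch fixes could show up differently depending on how many rows a scanner fetched per round trip (hence the test's name). Driving the same scan by hand looks roughly like this; the caching value is illustrative, and scan is assumed to be the Scan built by buildScanner():

    HTable t = new HTable(conf, Bytes.toBytes("testWeirdCacheBehaviour"));
    t.setScannerCaching(1);  // One row per next() round trip, no read-ahead.
    ResultScanner s = t.getScanner(scan);
    try {
      for (Result r : s) {
        // After the deletes, a correct run sees no rows here.
      }
    } finally {
      s.close();
    }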
diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 68a0a5c93a9..4472c3f1aa2 100644
--- a/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ b/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -40,6 +40,10 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
+import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.regionserver.HRegion.RegionScanner;
 import org.apache.hadoop.hbase.util.Bytes;
 
@@ -79,6 +83,127 @@ public class TestHRegion extends HBaseTestCase {
   // individual code pieces in the HRegion. Putting files locally in
   // /tmp/testtable
   //////////////////////////////////////////////////////////////////////////////
+
+
+  /**
+   * An involved filter test.  Has multiple column families and deletes in
+   * the mix.
+   */
+  public void testWeirdCacheBehaviour() throws Exception {
+    byte[] TABLE = Bytes.toBytes("testWeirdCacheBehaviour");
+    byte[][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"),
+        Bytes.toBytes("trans-type"), Bytes.toBytes("trans-date"),
+        Bytes.toBytes("trans-tags"), Bytes.toBytes("trans-group") };
+    initHRegion(TABLE, getName(), FAMILIES);
+    String value = "this is the value";
+    String value2 = "this is some other value";
+    String keyPrefix1 = "prefix1"; // UUID.randomUUID().toString();
+    String keyPrefix2 = "prefix2"; // UUID.randomUUID().toString();
+    String keyPrefix3 = "prefix3"; // UUID.randomUUID().toString();
+    putRows(this.region, 3, value, keyPrefix1);
+    putRows(this.region, 3, value, keyPrefix2);
+    putRows(this.region, 3, value, keyPrefix3);
+    // this.region.flushCommits();
+    putRows(this.region, 3, value2, keyPrefix1);
+    putRows(this.region, 3, value2, keyPrefix2);
+    putRows(this.region, 3, value2, keyPrefix3);
+    System.out.println("Checking values for key: " + keyPrefix1);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix1, value2, this.region));
+    System.out.println("Checking values for key: " + keyPrefix2);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix2, value2, this.region));
+    System.out.println("Checking values for key: " + keyPrefix3);
+    assertEquals("Got back incorrect number of rows from scan", 3,
+        getNumberOfRows(keyPrefix3, value2, this.region));
+    deleteColumns(this.region, value2, keyPrefix1);
+    deleteColumns(this.region, value2, keyPrefix2);
+    deleteColumns(this.region, value2, keyPrefix3);
+    System.out.println("Starting important checks.....");
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix1,
+        0, getNumberOfRows(keyPrefix1, value2, this.region));
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix2,
+        0, getNumberOfRows(keyPrefix2, value2, this.region));
+    assertEquals("Got back incorrect number of rows from scan: " + keyPrefix3,
+        0, getNumberOfRows(keyPrefix3, value2, this.region));
+  }
+
+  private void deleteColumns(HRegion r, String value, String keyPrefix)
+      throws IOException {
+    InternalScanner scanner = buildScanner(keyPrefix, value, r);
+    int count = 0;
+    boolean more = false;
+    List<KeyValue> results = new ArrayList<KeyValue>();
+    do {
+      more = scanner.next(results);
+      if (results != null && !results.isEmpty())
+        count++;
+      else
+        break;
+      Delete delete = new Delete(results.get(0).getRow());
+      delete.deleteColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
+      r.delete(delete, null, false);
+      results.clear();
+    } while (more);
+    assertEquals("Did not perform correct number of deletes", 3, count);
+  }
+
+  private int getNumberOfRows(String keyPrefix, String value, HRegion r) throws Exception {
+    InternalScanner resultScanner = buildScanner(keyPrefix, value, r);
+    int numberOfResults = 0;
+    List<KeyValue> results = new ArrayList<KeyValue>();
+    boolean more = false;
+    do {
+      more = resultScanner.next(results);
+      if (results != null && !results.isEmpty()) numberOfResults++;
+      else break;
+      for (KeyValue kv: results) {
+        System.out.println("kv=" + kv.toString() + ", " + Bytes.toString(kv.getValue()));
+      }
+      results.clear();
+    } while(more);
+    return numberOfResults;
+  }
+
+  private InternalScanner buildScanner(String keyPrefix, String value, HRegion r)
+      throws IOException {
+    // Defaults to FilterList.Operator.MUST_PASS_ALL.
+    FilterList allFilters = new FilterList();
+    allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
+    // Only return rows where this column value exists in the row.
+    SingleColumnValueFilter filter =
+      new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
+        Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
+    filter.setFilterIfMissing(true);
+    allFilters.addFilter(filter);
+    Scan scan = new Scan();
+    scan.addFamily(Bytes.toBytes("trans-blob"));
+    scan.addFamily(Bytes.toBytes("trans-type"));
+    scan.addFamily(Bytes.toBytes("trans-date"));
+    scan.addFamily(Bytes.toBytes("trans-tags"));
+    scan.addFamily(Bytes.toBytes("trans-group"));
+    scan.setFilter(allFilters);
+    return r.getScanner(scan);
+  }
+
+  private void putRows(HRegion r, int numRows, String value, String key)
+      throws IOException {
+    for (int i = 0; i < numRows; i++) {
+      String row = key + "_" + i/* UUID.randomUUID().toString() */;
+      System.out.println(String.format("Saving row: %s, with value %s", row,
+          value));
+      Put put = new Put(Bytes.toBytes(row));
+      put.add(Bytes.toBytes("trans-blob"), null,
+        Bytes.toBytes("value for blob"));
+      put.add(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
+      put.add(Bytes.toBytes("trans-date"), null,
+        Bytes.toBytes("20090921010101999"));
+      put.add(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"),
+        Bytes.toBytes(value));
+      put.add(Bytes.toBytes("trans-group"), null,
+        Bytes.toBytes("adhocTransactionGroupId"));
+      r.put(put);
+    }
+  }
 
   public void testFamilyWithAndWithoutColon() throws Exception {
     byte [] b = Bytes.toBytes(getName());
@@ -1781,7 +1906,8 @@ public class TestHRegion extends HBaseTestCase {
   }
 
   private void initHRegion (byte [] tableName, String callingMethod,
-    HBaseConfiguration conf, byte [] ... families) throws IOException{
+    HBaseConfiguration conf, byte [] ... families)
+  throws IOException{
     HTableDescriptor htd = new HTableDescriptor(tableName);
     for(byte [] family : families) {
       htd.addFamily(new HColumnDescriptor(family));
diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java b/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
index 51a214eccc7..ab023794c65 100644
--- a/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -176,9 +176,11 @@ public class TestScanner extends HBaseTestCase {
     try {
       this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
       addContent(this.r, HConstants.CATALOG_FAMILY);
-      Filter newFilter = new PrefixFilter(Bytes.toBytes("ab"));
+      byte [] prefix = Bytes.toBytes("ab");
+      Filter newFilter = new PrefixFilter(prefix);
       Scan scan = new Scan();
       scan.setFilter(newFilter);
+      scan.setStartRow(prefix);
       rowPrefixFilter(scan);
 
       byte[] stopRow = Bytes.toBytes("bbc");
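A closing observation on the TestScanner change: a PrefixFilter only excludes rows, it does not position the scanner, so a prefix scan without a start row still walks every row from the beginning of the table. Pairing the filter with scan.setStartRow(prefix), as the test now does, is the usual idiom:

    byte [] prefix = Bytes.toBytes("ab");
    Scan scan = new Scan();
    scan.setStartRow(prefix);                  // Seek straight to the prefix.
    scan.setFilter(new PrefixFilter(prefix));  // Exclude anything that does not match.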