results) {
-    if (true) throw new RuntimeException("Not Yet Implemented");
-    return false;
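+    // filterRow returns true when the row should be filtered out. Under
+    // MUST_PASS_ALL a single rejecting filter is enough to exclude the
+    // row; under MUST_PASS_ONE the row starts excluded and a single
+    // accepting filter lets it through. Filters that processAlways()
+    // still get to see the row after the verdict is settled.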
+    boolean resultFound = false;
+    boolean result = operator == Operator.MUST_PASS_ONE;
+    for (RowFilterInterface filter : filters) {
+      if (!resultFound) {
+        if (operator == Operator.MUST_PASS_ALL) {
+          if (filter.filterAllRemaining() || filter.filterRow(results)) {
+            result = true;
+            resultFound = true;
+          }
+        } else if (operator == Operator.MUST_PASS_ONE) {
+          if (!filter.filterAllRemaining() && !filter.filterRow(results)) {
+            result = false;
+            resultFound = true;
+          }
+        }
+      } else if (filter.processAlways()) {
+        filter.filterRow(results);
+      }
+    }
+    return result;
}
public void readFields(final DataInput in) throws IOException {
diff --git a/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java b/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
index 7946e4f76aa..5747178959d 100644
--- a/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
+++ b/src/java/org/apache/hadoop/hbase/filter/StopRowFilter.java
@@ -97,7 +97,9 @@ public class StopRowFilter implements RowFilterInterface {
}
return false;
}
-    return Bytes.compareTo(stopRowKey, rowKey) <= 0;
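+    // Compare the whole stop row key against the row key slice passed in.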
+    return Bytes.compareTo(stopRowKey, 0, stopRowKey.length, rowKey, offset,
+      length) <= 0;
}
/**
diff --git a/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java b/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
index 967ad8eb2c2..3634a375ba5 100644
--- a/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
+++ b/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.mapred.JobConfigurable;
*/
public class TableInputFormat extends TableInputFormatBase implements
JobConfigurable {
- @SuppressWarnings("hiding")
private final Log LOG = LogFactory.getLog(TableInputFormat.class);
/**
diff --git a/src/java/org/apache/hadoop/hbase/util/Keying.java b/src/java/org/apache/hadoop/hbase/util/Keying.java
index a26f141c27e..49ed739ff5b 100644
--- a/src/java/org/apache/hadoop/hbase/util/Keying.java
+++ b/src/java/org/apache/hadoop/hbase/util/Keying.java
@@ -28,6 +28,7 @@ import java.util.regex.Pattern;
* Use fabricating row names or column qualifiers.
* TODO: Add createSchemeless key, a key that doesn't care if scheme is
* http or https.
+ * @see Bytes#split(byte[], byte[], int)
*/
public class Keying {
private static final String SCHEME = "r:";
diff --git a/src/test/org/apache/hadoop/hbase/client/TestGetRowVersions.java b/src/test/org/apache/hadoop/hbase/client/TestGetRowVersions.java
index 7c6c370d485..df48bb3ca7b 100644
--- a/src/test/org/apache/hadoop/hbase/client/TestGetRowVersions.java
+++ b/src/test/org/apache/hadoop/hbase/client/TestGetRowVersions.java
@@ -101,4 +101,4 @@ public class TestGetRowVersions extends HBaseClusterTestCase {
}
}
}
-}
+}
\ No newline at end of file
diff --git a/src/test/org/apache/hadoop/hbase/filter/DisabledTestRowFilterAfterWrite.java b/src/test/org/apache/hadoop/hbase/filter/DisabledTestRowFilterAfterWrite.java
index cf826eba122..e69de29bb2d 100644
--- a/src/test/org/apache/hadoop/hbase/filter/DisabledTestRowFilterAfterWrite.java
+++ b/src/test/org/apache/hadoop/hbase/filter/DisabledTestRowFilterAfterWrite.java
@@ -1,201 +0,0 @@
-/**
- * Copyright 2008 The Apache Software Foundation
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.filter;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-
-import junit.framework.Assert;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.HBaseClusterTestCase;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Scanner;
-import org.apache.hadoop.hbase.io.BatchUpdate;
-import org.apache.hadoop.hbase.io.Cell;
-import org.apache.hadoop.hbase.io.RowResult;
-import org.apache.hadoop.hbase.util.Bytes;
-
-/** Test regexp filters HBASE-476 */
-public class DisabledTestRowFilterAfterWrite extends HBaseClusterTestCase {
-
-  private static final Log LOG = LogFactory.getLog(DisabledTestRowFilterAfterWrite.class.getName());
-
-  static final String TABLE_NAME = "TestTable";
-  static final String FAMILY = "C:";
-  static final String COLUMN1 = FAMILY + "col1";
-  static final byte [] TEXT_COLUMN1 = Bytes.toBytes(COLUMN1);
-  static final String COLUMN2 = FAMILY + "col2";
-  static final byte [] TEXT_COLUMN2 = Bytes.toBytes(COLUMN2);
-
-  private static final byte [][] columns = {
-    TEXT_COLUMN1, TEXT_COLUMN2
-  };
-
-  private static final int NUM_ROWS = 10;
-  private static final int VALUE_SIZE = 1000;
-  private static final byte[] VALUE = new byte[VALUE_SIZE];
-  private static final int COL_2_SIZE = 5;
-  private static final int KEY_SIZE = 9;
-  private static final int NUM_REWRITES = 10;
-  static {
-    Arrays.fill(VALUE, (byte) 'a');
-  }
-
-  /** constructor */
-  public DisabledTestRowFilterAfterWrite() {
-    super();
-
-    // Make sure the cache gets flushed so we get multiple stores
-    conf.setInt("hbase.hregion.memcache.flush.size", (NUM_ROWS * (VALUE_SIZE + COL_2_SIZE + KEY_SIZE)));
-    LOG.info("memcach flush : " + conf.get("hbase.hregion.memcache.flush.size"));
-    conf.setInt("hbase.regionserver.optionalcacheflushinterval", 100000000);
-    // Avoid compaction to keep multiple stores
-    conf.setInt("hbase.hstore.compactionThreshold", 10000);
-
-    // Make lease timeout longer, lease checks less frequent
-    conf.setInt("hbase.master.lease.period", 10 * 1000);
-
-    // For debugging
-    conf.setInt("hbase.regionserver.lease.period", 20 * 60 * 1000);
-    conf.setInt("ipc.client.timeout", 20 * 60 * 1000);
-  }
-
-  @Override
-  public void tearDown() throws Exception {
-    super.tearDown();
-  }
-
-  /**
-   * Test hbase mapreduce jobs against single region and multi-region tables.
-   *
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public void testAfterWrite() throws IOException, InterruptedException {
-    singleTableTest();
-  }
-
-  /*
-   * Test against a single region. @throws IOException
-   */
-  private void singleTableTest() throws IOException, InterruptedException {
-    HTableDescriptor desc = new HTableDescriptor(TABLE_NAME);
-    desc.addFamily(new HColumnDescriptor(FAMILY));
-
-    // Create a table.
-    HBaseAdmin admin = new HBaseAdmin(this.conf);
-    admin.createTable(desc);
-
-    // insert some data into the test table
-    HTable table = new HTable(conf, TABLE_NAME);
-
-    for (int i = 0; i < NUM_ROWS; i++) {
-      BatchUpdate b = new BatchUpdate("row_" + String.format("%1$05d", i));
-
-      b.put(TEXT_COLUMN1, VALUE);
-      b.put(TEXT_COLUMN2, String.format("%1$05d", i).getBytes());
-      table.commit(b);
-    }
-
-    // LOG.info("Print table contents using scanner before map/reduce for " + TABLE_NAME);
-    // scanTable(TABLE_NAME, false);
-    // LOG.info("Print table contents using scanner+filter before map/reduce for " + TABLE_NAME);
-    // scanTableWithRowFilter(TABLE_NAME, false);
-
-    // Do some identity write operations on one column of the data.
-    for (int n = 0; n < NUM_REWRITES; n++) {
-      for (int i = 0; i < NUM_ROWS; i++) {
-        BatchUpdate b = new BatchUpdate("row_" + String.format("%1$05d", i));
-
-        b.put(TEXT_COLUMN2, String.format("%1$05d", i).getBytes());
-        table.commit(b);
-      }
-    }
-
-    // Wait for the flush to happen
-    LOG.info("Waiting, for flushes to complete");
-    Thread.sleep(5 * 1000);
-    // Wait for the flush to happen
-    LOG.info("Done. No flush should happen after this");
-
-    // Do another round so to populate the mem cache
-    for (int i = 0; i < NUM_ROWS; i++) {
-      BatchUpdate b = new BatchUpdate("row_" + String.format("%1$05d", i));
-      b.put(TEXT_COLUMN2, String.format("%1$05d", i).getBytes());
-      table.commit(b);
-    }
-
-    LOG.info("Print table contents using scanner after map/reduce for " + TABLE_NAME);
-    scanTable(TABLE_NAME, true);
-    LOG.info("Print table contents using scanner+filter after map/reduce for " + TABLE_NAME);
-    scanTableWithRowFilter(TABLE_NAME, true);
-  }
-
-  private void scanTable(final String tableName, final boolean printValues) throws IOException {
-    HTable table = new HTable(conf, tableName);
-
-    Scanner scanner = table.getScanner(columns, HConstants.EMPTY_START_ROW);
-    int numFound = doScan(scanner, printValues);
-    Assert.assertEquals(NUM_ROWS, numFound);
-  }
-
-  private void scanTableWithRowFilter(final String tableName, final boolean printValues) throws IOException {
-    HTable table = new HTable(conf, tableName);
-    Map<byte [], Cell> columnMap = new HashMap<byte [], Cell>();
-    columnMap.put(TEXT_COLUMN1,
-      new Cell(VALUE, HConstants.LATEST_TIMESTAMP));
-    RegExpRowFilter filter = new RegExpRowFilter(null, columnMap);
-    Scanner scanner = table.getScanner(columns, HConstants.EMPTY_START_ROW, filter);
-    int numFound = doScan(scanner, printValues);
-    Assert.assertEquals(NUM_ROWS, numFound);
-  }
-
-  private int doScan(final Scanner scanner, final boolean printValues) throws IOException {
-    {
-      int count = 0;
-
-      try {
-        for (RowResult result : scanner) {
-          if (printValues) {
-            LOG.info("row: " + Bytes.toString(result.getRow()));
-            for (Map.Entry<byte [], Cell> e : result.entrySet()) {
-              LOG.info(" column: " + e.getKey() + " value: "
-                + new String(e.getValue().getValue(), HConstants.UTF8_ENCODING));
-            }
-          }
-          count++;
-        }
-
-      } finally {
-        scanner.close();
-      }
-      return count;
-    }
-  }
-}
diff --git a/src/test/org/apache/hadoop/hbase/filter/DisabledTestColumnValueFilter.java b/src/test/org/apache/hadoop/hbase/filter/TestColumnValueFilter.java
similarity index 80%
rename from src/test/org/apache/hadoop/hbase/filter/DisabledTestColumnValueFilter.java
rename to src/test/org/apache/hadoop/hbase/filter/TestColumnValueFilter.java
index 95eb968de58..eb300070d2c 100644
--- a/src/test/org/apache/hadoop/hbase/filter/DisabledTestColumnValueFilter.java
+++ b/src/test/org/apache/hadoop/hbase/filter/TestColumnValueFilter.java
@@ -33,7 +33,7 @@ import junit.framework.TestCase;
/**
* Tests the column value filter
*/
-public class DisabledTestColumnValueFilter extends TestCase {
+public class TestColumnValueFilter extends TestCase {
private static final byte[] ROW = Bytes.toBytes("test");
private static final byte[] COLUMN = Bytes.toBytes("test:foo");
@@ -65,26 +65,34 @@ public class DisabledTestColumnValueFilter extends TestCase {
private void basicFilterTests(RowFilterInterface filter)
throws Exception {
- assertTrue("basicFilter1", filter.filterColumn(ROW, COLUMN, VAL_1));
- assertFalse("basicFilter2", filter.filterColumn(ROW, COLUMN, VAL_2));
- assertFalse("basicFilter3", filter.filterColumn(ROW, COLUMN, VAL_3));
- assertFalse("basicFilter4", filter.filterColumn(ROW, COLUMN, VAL_4));
+ assertTrue("basicFilter1", filter.filterColumn(ROW, 0, ROW.length,
+ COLUMN, 0, COLUMN.length, VAL_1, 0, VAL_1.length));
+ assertFalse("basicFilter2", filter.filterColumn(ROW, 0, ROW.length,
+ COLUMN, 0, COLUMN.length, VAL_2, 0, VAL_2.length));
+ assertFalse("basicFilter3", filter.filterColumn(ROW, 0, ROW.length,
+ COLUMN, 0, COLUMN.length, VAL_3, 0, VAL_3.length));
+ assertFalse("basicFilter4", filter.filterColumn(ROW, 0, ROW.length,
+ COLUMN, 0, COLUMN.length, VAL_4, 0, VAL_4.length));
assertFalse("basicFilterAllRemaining", filter.filterAllRemaining());
assertFalse("basicFilterNotNull", filter.filterRow((List)null));
}
private void substrFilterTests(RowFilterInterface filter)
throws Exception {
- assertTrue("substrTrue", filter.filterColumn(ROW, COLUMN, FULLSTRING_1));
- assertFalse("substrFalse", filter.filterColumn(ROW, COLUMN, FULLSTRING_2));
+ assertTrue("substrTrue", filter.filterColumn(ROW, 0, ROW.length,
+ COLUMN, 0, COLUMN.length, FULLSTRING_1, 0, FULLSTRING_1.length));
+ assertFalse("substrFalse", filter.filterColumn(ROW, 0, ROW.length,
+ COLUMN, 0, COLUMN.length, FULLSTRING_2, 0, FULLSTRING_2.length));
assertFalse("substrFilterAllRemaining", filter.filterAllRemaining());
assertFalse("substrFilterNotNull", filter.filterRow((List)null));
}
private void regexFilterTests(RowFilterInterface filter)
throws Exception {
- assertTrue("regexTrue", filter.filterColumn(ROW, COLUMN, FULLSTRING_1));
- assertFalse("regexFalse", filter.filterColumn(ROW, COLUMN, FULLSTRING_2));
+ assertTrue("regexTrue", filter.filterColumn(ROW, 0, ROW.length,
+ COLUMN, 0, COLUMN.length, FULLSTRING_1, 0, FULLSTRING_1.length));
+ assertFalse("regexFalse", filter.filterColumn(ROW, 0, ROW.length,
+ COLUMN, 0, COLUMN.length, FULLSTRING_2, 0, FULLSTRING_2.length));
assertFalse("regexFilterAllRemaining", filter.filterAllRemaining());
assertFalse("regexFilterNotNull", filter.filterRow((List)null));
}
diff --git a/src/test/org/apache/hadoop/hbase/filter/DisabledTestStopRowFilter.java b/src/test/org/apache/hadoop/hbase/filter/TestStopRowFilter.java
similarity index 73%
rename from src/test/org/apache/hadoop/hbase/filter/DisabledTestStopRowFilter.java
rename to src/test/org/apache/hadoop/hbase/filter/TestStopRowFilter.java
index 969e0a0a187..97cc3171072 100644
--- a/src/test/org/apache/hadoop/hbase/filter/DisabledTestStopRowFilter.java
+++ b/src/test/org/apache/hadoop/hbase/filter/TestStopRowFilter.java
@@ -33,7 +33,7 @@ import junit.framework.TestCase;
/**
* Tests the stop row filter
*/
-public class DisabledTestStopRowFilter extends TestCase {
+public class TestStopRowFilter extends TestCase {
private final byte [] STOP_ROW = Bytes.toBytes("stop_row");
private final byte [] GOOD_ROW = Bytes.toBytes("good_row");
private final byte [] PAST_STOP_ROW = Bytes.toBytes("zzzzzz");
@@ -74,21 +74,25 @@ public class DisabledTestStopRowFilter extends TestCase {
// Ensure the serialization preserved the filter by running a full test.
stopRowTests(newFilter);
}
-
-  private void stopRowTests(RowFilterInterface filter) throws Exception {
-    assertFalse("Filtering on " + Bytes.toString(GOOD_ROW), filter.filterRowKey(GOOD_ROW));
-    assertTrue("Filtering on " + Bytes.toString(STOP_ROW), filter.filterRowKey(STOP_ROW));
-    assertTrue("Filtering on " + Bytes.toString(PAST_STOP_ROW), filter.filterRowKey(PAST_STOP_ROW));
-
-    assertFalse("Filtering on " + Bytes.toString(GOOD_ROW), filter.filterColumn(GOOD_ROW, null,
-      null));
-    assertTrue("Filtering on " + Bytes.toString(STOP_ROW), filter.filterColumn(STOP_ROW, null, null));
-    assertTrue("Filtering on " + Bytes.toString(PAST_STOP_ROW), filter.filterColumn(PAST_STOP_ROW,
-      null, null));
+  private void stopRowTests(RowFilterInterface filter) throws Exception {
+    assertFalse("Filtering on " + Bytes.toString(GOOD_ROW),
+      filter.filterRowKey(GOOD_ROW, 0, GOOD_ROW.length));
+    assertTrue("Filtering on " + Bytes.toString(STOP_ROW),
+      filter.filterRowKey(STOP_ROW, 0, STOP_ROW.length));
+    assertTrue("Filtering on " + Bytes.toString(PAST_STOP_ROW),
+      filter.filterRowKey(PAST_STOP_ROW, 0, PAST_STOP_ROW.length));
+    assertFalse("Filtering on " + Bytes.toString(GOOD_ROW),
+      filter.filterColumn(GOOD_ROW, 0, GOOD_ROW.length, null, 0, 0,
+        null, 0, 0));
+    assertTrue("Filtering on " + Bytes.toString(STOP_ROW),
+      filter.filterColumn(STOP_ROW, 0, STOP_ROW.length, null, 0, 0, null, 0, 0));
+    assertTrue("Filtering on " + Bytes.toString(PAST_STOP_ROW),
+      filter.filterColumn(PAST_STOP_ROW, 0, PAST_STOP_ROW.length, null, 0, 0,
+        null, 0, 0));
assertFalse("FilterAllRemaining", filter.filterAllRemaining());
assertFalse("FilterNotNull", filter.filterRow((List)null));
- assertFalse("Filter a null", filter.filterRowKey(null));
+ assertFalse("Filter a null", filter.filterRowKey(null, 0, 0));
}
}
\ No newline at end of file
diff --git a/src/test/org/apache/hadoop/hbase/filter/DisabledTestWhileMatchRowFilter.java b/src/test/org/apache/hadoop/hbase/filter/TestWhileMatchRowFilter.java
similarity index 82%
rename from src/test/org/apache/hadoop/hbase/filter/DisabledTestWhileMatchRowFilter.java
rename to src/test/org/apache/hadoop/hbase/filter/TestWhileMatchRowFilter.java
index e84f2692679..146e474d306 100644
--- a/src/test/org/apache/hadoop/hbase/filter/DisabledTestWhileMatchRowFilter.java
+++ b/src/test/org/apache/hadoop/hbase/filter/TestWhileMatchRowFilter.java
@@ -34,7 +34,7 @@ import junit.framework.TestCase;
/**
* Tests for the while-match filter
*/
-public class DisabledTestWhileMatchRowFilter extends TestCase {
+public class TestWhileMatchRowFilter extends TestCase {
WhileMatchRowFilter wmStopRowFilter;
WhileMatchRowFilter wmRegExpRowFilter;
@@ -93,14 +93,16 @@ public class DisabledTestWhileMatchRowFilter extends TestCase {
// Test cases that should pass the row
toTest = "apples";
assertFalse("filter: '" + toTest + "'", filter.filterRowKey(Bytes.toBytes(toTest)));
- assertFalse("innerFilter: '" + toTest + "'", innerFilter.filterRowKey(Bytes.toBytes(
- toTest)));
-
+ byte [] toTestBytes = Bytes.toBytes(toTest);
+ assertFalse("innerFilter: '" + toTest + "'",
+ innerFilter.filterRowKey(toTestBytes, 0, toTestBytes.length));
+
// Test cases that should fail the row
toTest = "tuna";
- assertTrue("filter: '" + toTest + "'", filter.filterRowKey(Bytes.toBytes(toTest)));
- assertTrue("innerFilter: '" + toTest + "'", innerFilter.filterRowKey(Bytes.toBytes(
- toTest)));
+ toTestBytes = Bytes.toBytes(toTest);
+ assertTrue("filter: '" + toTest + "'", filter.filterRowKey(toTestBytes));
+ assertTrue("innerFilter: '" + toTest + "'",
+ innerFilter.filterRowKey(toTestBytes, 0, toTestBytes.length));
// The difference in switch
assertTrue("filter: filterAllRemaining", filter.filterAllRemaining());
@@ -123,15 +125,17 @@ public class DisabledTestWhileMatchRowFilter extends TestCase {
// Test cases that should pass the row
toTest = "regex_match";
+    byte [] toTestBytes = Bytes.toBytes(toTest);
assertFalse("filter: '" + toTest + "'", filter.filterRowKey(Bytes.toBytes(toTest)));
-    assertFalse("innerFilter: '" + toTest + "'", innerFilter.filterRowKey(Bytes.toBytes(
-      toTest)));
+    assertFalse("innerFilter: '" + toTest + "'",
+      innerFilter.filterRowKey(toTestBytes, 0, toTestBytes.length));
// Test cases that should fail the row
toTest = "not_a_match";
+    toTestBytes = Bytes.toBytes(toTest);
assertTrue("filter: '" + toTest + "'", filter.filterRowKey(Bytes.toBytes(toTest)));
-    assertTrue("innerFilter: '" + toTest + "'", innerFilter.filterRowKey(Bytes.toBytes(
-      toTest)));
+    assertTrue("innerFilter: '" + toTest + "'",
+      innerFilter.filterRowKey(toTestBytes, 0, toTestBytes.length));
// The difference in switch
assertTrue("filter: filterAllRemaining", filter.filterAllRemaining());
@@ -145,7 +149,9 @@ public class DisabledTestWhileMatchRowFilter extends TestCase {
// Test filter(Text, Text, byte[]) for functionality only (no switch-cases)
toTest = "asdf_regex_hjkl";
- assertFalse("filter: '" + toTest + "'", filter.filterColumn(Bytes.toBytes(toTest),
- null, null));
+ toTestBytes = Bytes.toBytes(toTest);
+ assertFalse("filter: '" + toTest + "'",
+ filter.filterColumn(toTestBytes, 0, toTestBytes.length,
+ null, 0, 0, null, 0, 0));
}
}
diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java b/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
index 1ed536c659d..4c7b8ae214a 100644
--- a/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
+++ b/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java
@@ -90,6 +90,55 @@ public class TestScanner extends HBaseTestCase {
}
+  /**
+   * Test basic stop row filter works.
+   * @throws Exception
+   */
+  public void testStopRow() throws Exception {
+    byte [] startrow = Bytes.toBytes("bbb");
+    byte [] stoprow = Bytes.toBytes("ccc");
+    try {
+      this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
+      addContent(this.r, HConstants.COLUMN_FAMILY);
+      List<KeyValue> results = new ArrayList<KeyValue>();
+      // Do simple test of getting one row only first.
+      InternalScanner s = r.getScanner(HConstants.COLUMN_FAMILY_ARRAY,
+        Bytes.toBytes("abc"), HConstants.LATEST_TIMESTAMP,
+        new WhileMatchRowFilter(new StopRowFilter(Bytes.toBytes("abd"))));
+      int count = 0;
+      while (s.next(results)) {
+        count++;
+      }
+      s.close();
+      assertEquals(1, count);
+      // Now do something a bit more involved.
+      s = r.getScanner(HConstants.COLUMN_FAMILY_ARRAY,
+        startrow, HConstants.LATEST_TIMESTAMP,
+        new WhileMatchRowFilter(new StopRowFilter(stoprow)));
+      count = 0;
+      KeyValue kv = null;
+      results = new ArrayList<KeyValue>();
+      for (boolean first = true; s.next(results);) {
+        kv = results.get(0);
+        if (first) {
+          assertTrue(Bytes.BYTES_COMPARATOR.compare(startrow, kv.getRow()) == 0);
+          first = false;
+        }
+        count++;
+      }
+      assertTrue(Bytes.BYTES_COMPARATOR.compare(stoprow, kv.getRow()) > 0);
+      // We got something back.
+      assertTrue(count > 10);
+      s.close();
+    } finally {
+      this.r.close();
+      this.r.getLog().closeAndDelete();
+      shutdownDfs(this.cluster);
+    }
+  }
+
/** The test!
* @throws IOException
*/
@@ -227,7 +274,6 @@ public class TestScanner extends HBaseTestCase {
throws IOException {
InternalScanner scanner = null;
List results = new ArrayList();
-
byte [][][] scanColumns = {
COLS,
EXPLICIT_COLS
@@ -238,27 +284,28 @@
scanner = r.getScanner(scanColumns[i], FIRST_ROW,
System.currentTimeMillis(), null);
while (scanner.next(results)) {
-        // FIX!!!
-//        assertTrue(results.containsKey(HConstants.COL_REGIONINFO));
-//        byte [] val = results.get(HConstants.COL_REGIONINFO).getValue();
-//        validateRegionInfo(val);
-//        if(validateStartcode) {
-//          assertTrue(results.containsKey(HConstants.COL_STARTCODE));
-//          val = results.get(HConstants.COL_STARTCODE).getValue();
-//          assertNotNull(val);
-//          assertFalse(val.length == 0);
-//          long startCode = Bytes.toLong(val);
-//          assertEquals(START_CODE, startCode);
-//        }
-//
-//        if(serverName != null) {
-//          assertTrue(results.containsKey(HConstants.COL_SERVER));
-//          val = results.get(HConstants.COL_SERVER).getValue();
-//          assertNotNull(val);
-//          assertFalse(val.length == 0);
-//          String server = Bytes.toString(val);
-//          assertEquals(0, server.compareTo(serverName));
-//        }
+        assertTrue(hasColumn(results, HConstants.COL_REGIONINFO));
+        byte [] val = getColumn(results, HConstants.COL_REGIONINFO).getValue();
+        validateRegionInfo(val);
+        if(validateStartcode) {
+          assertTrue(hasColumn(results, HConstants.COL_STARTCODE));
+          val = getColumn(results, HConstants.COL_STARTCODE).getValue();
+          assertNotNull(val);
+          assertFalse(val.length == 0);
+          long startCode = Bytes.toLong(val);
+          assertEquals(START_CODE, startCode);
+        }
+
+        if(serverName != null) {
+          assertTrue(hasColumn(results, HConstants.COL_SERVER));
+          val = getColumn(results, HConstants.COL_SERVER).getValue();
+          assertNotNull(val);
+          assertFalse(val.length == 0);
+          String server = Bytes.toString(val);
+          assertEquals(0, server.compareTo(serverName));
+        }
results.clear();
}
@@ -272,47 +317,38 @@
}
}
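+  /**
+   * @return True if <code>kvs</code> contains a KeyValue whose column
+   * matches <code>column</code>.
+   */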
+  private boolean hasColumn(final List<KeyValue> kvs, final byte [] column) {
+    for (KeyValue kv: kvs) {
+      if (kv.matchingColumn(column)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
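+  /**
+   * @return First KeyValue in <code>kvs</code> whose column matches
+   * <code>column</code>, else null.
+   */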
+  private KeyValue getColumn(final List<KeyValue> kvs, final byte [] column) {
+    for (KeyValue kv: kvs) {
+      if (kv.matchingColumn(column)) {
+        return kv;
+      }
+    }
+    return null;
+  }
+
/** Use get to retrieve the HRegionInfo and validate it */
private void getRegionInfo() throws IOException {
byte [] bytes = region.get(ROW_KEY, HConstants.COL_REGIONINFO).getValue();
validateRegionInfo(bytes);
}
-  /**
-   * Test basic stop row filter works.
-   * @throws Exception
-   */
-  public void testStopRow() throws Exception {
-    byte [] startrow = Bytes.toBytes("bbb");
-    byte [] stoprow = Bytes.toBytes("ccc");
-    try {
-      this.r = createNewHRegion(REGION_INFO.getTableDesc(), null, null);
-      addContent(this.r, HConstants.COLUMN_FAMILY);
-      InternalScanner s = r.getScanner(HConstants.COLUMN_FAMILY_ARRAY,
-        startrow, HConstants.LATEST_TIMESTAMP,
-        new WhileMatchRowFilter(new StopRowFilter(stoprow)));
-      List<KeyValue> results = new ArrayList<KeyValue>();
-      int count = 0;
-      KeyValue kv = null;
-      for (boolean first = true; s.next(results);) {
-        kv = results.get(0);
-        if (first) {
-          assertTrue(Bytes.BYTES_COMPARATOR.compare(startrow, kv.getRow()) == 0);
-          first = false;
-        }
-        count++;
-      }
-      assertTrue(Bytes.BYTES_COMPARATOR.compare(stoprow, kv.getRow()) > 0);
-      // We got something back.
-      assertTrue(count > 10);
-      s.close();
-    } finally {
-      this.r.close();
-      this.r.getLog().closeAndDelete();
-      shutdownDfs(this.cluster);
-    }
-  }
-
/**
* HBase-910.
* @throws Exception
diff --git a/src/test/org/apache/hadoop/hbase/util/TestBytes.java b/src/test/org/apache/hadoop/hbase/util/TestBytes.java
index 79df4d5be60..8272ddab3df 100644
--- a/src/test/org/apache/hadoop/hbase/util/TestBytes.java
+++ b/src/test/org/apache/hadoop/hbase/util/TestBytes.java
@@ -54,7 +54,9 @@ public class TestBytes extends TestCase {
for (int i = 0; i < parts.length; i++) {
System.out.println(Bytes.toString(parts[i]));
}
-    assertEquals(2, parts.length);
+    assertEquals(3, parts.length);
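+    // split now returns the split point as well as the two endpoints,
+    // hence three parts with the middle key at parts[1].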
assertTrue(Bytes.equals(parts[1], middle));
}