HADOOP-2299 Support inclusive scans

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@601005 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2007-12-04 17:07:22 +00:00
parent c5eee12d4c
commit 0d7a01cb8a
4 changed files with 143 additions and 1 deletions

View File

@ -87,6 +87,7 @@ Trunk (unreleased changes)
(Bryan Duxbury via Stack) (Bryan Duxbury via Stack)
HADOOP-2339 Delete command with no WHERE clause HADOOP-2339 Delete command with no WHERE clause
(Edward Yoon via Stack) (Edward Yoon via Stack)
HADOOP-2299 Support inclusive scans (Bryan Duxbury via Stack)
Release 0.15.1 Release 0.15.1

View File

@ -0,0 +1,48 @@
package org.apache.hadoop.hbase.filter;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.TreeMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text;
/*
* Subclass of StopRowFilter that filters rows > the stop row,
* making it include up to the last row but no further.
*/
public class InclusiveStopRowFilter extends StopRowFilter{
/**
* Default constructor, filters nothing. Required though for RPC
* deserialization.
*/
public InclusiveStopRowFilter() {super();}
/**
* Constructor that takes a stopRowKey on which to filter
*
* @param stopRowKey rowKey to filter on.
*/
public InclusiveStopRowFilter(final Text stopRowKey) {
super(stopRowKey);
}
public boolean filter(final Text rowKey) {
if (rowKey == null) {
if (this.stopRowKey == null) {
return true;
}
return false;
}
boolean result = this.stopRowKey.compareTo(rowKey) < 0;
if (LOG.isDebugEnabled()) {
LOG.debug("Filter result for rowKey: " + rowKey + ". Result: " +
result);
}
return result;
}
}

View File

@ -34,7 +34,7 @@ import org.apache.hadoop.io.Text;
*/ */
public class StopRowFilter implements RowFilterInterface { public class StopRowFilter implements RowFilterInterface {
private Text stopRowKey; protected Text stopRowKey;
static final Log LOG = LogFactory.getLog(StopRowFilter.class); static final Log LOG = LogFactory.getLog(StopRowFilter.class);

View File

@ -0,0 +1,93 @@
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.filter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import org.apache.hadoop.io.Text;
import junit.framework.TestCase;
/**
* Tests the inclusive stop row filter
*/
/**
 * Tests the inclusive stop row filter
 */
public class TestInclusiveStopRowFilter extends TestCase {
  private final Text STOP_ROW = new Text("stop_row");
  private final Text GOOD_ROW = new Text("good_row");
  private final Text PAST_STOP_ROW = new Text("zzzzzz");

  RowFilterInterface mainFilter;

  /** {@inheritDoc} */
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    this.mainFilter = new InclusiveStopRowFilter(STOP_ROW);
  }

  /**
   * Tests identification of the stop row
   * @throws Exception
   */
  public void testStopRowIdentification() throws Exception {
    stopRowTests(this.mainFilter);
  }

  /**
   * Tests serialization
   * @throws Exception
   */
  public void testSerialization() throws Exception {
    // Serialize mainFilter into an in-memory byte array.
    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
    DataOutputStream dataOut = new DataOutputStream(byteStream);
    this.mainFilter.write(dataOut);
    dataOut.close();

    // Deserialize those bytes into a fresh filter instance.
    DataInputStream dataIn =
      new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray()));
    RowFilterInterface deserialized = new InclusiveStopRowFilter();
    deserialized.readFields(dataIn);

    // The round-tripped filter must behave exactly like the original.
    stopRowTests(deserialized);
  }

  // Exercises the filter contract: rows up to and including the stop row
  // pass, anything past it is filtered, and null handling is consistent.
  private void stopRowTests(RowFilterInterface rowFilter) throws Exception {
    assertFalse("Filtering on " + GOOD_ROW, rowFilter.filter(GOOD_ROW));
    assertFalse("Filtering on " + STOP_ROW, rowFilter.filter(STOP_ROW));
    assertTrue("Filtering on " + PAST_STOP_ROW, rowFilter.filter(PAST_STOP_ROW));

    assertFalse("Filtering on " + GOOD_ROW, rowFilter.filter(GOOD_ROW, null,
      null));
    assertFalse("Filtering on " + STOP_ROW, rowFilter.filter(STOP_ROW, null, null));
    assertTrue("Filtering on " + PAST_STOP_ROW, rowFilter.filter(PAST_STOP_ROW,
      null, null));

    assertFalse("FilterAllRemaining", rowFilter.filterAllRemaining());
    assertFalse("FilterNotNull", rowFilter.filterNotNull(null));

    assertFalse("Filter a null", rowFilter.filter(null));
  }
}