HBASE-1830 HbaseObjectWritable methods should allow null HBCs for when Writable is not Configurable (Stack via jgray)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@816020 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jonathan Gray 2009-09-17 01:17:18 +00:00
parent b37eca06af
commit b59e375a5c
3 changed files with 18 additions and 6 deletions

View File

@@ -28,6 +28,8 @@ Release 0.21.0 - Unreleased
   HBASE-1821 Filtering by SingleColumnValueFilter bug
   HBASE-1840 RowLock fails when used with IndexTable (Keith Thomas via Stack)
   HBASE-818  HFile code review and refinement (Schubert Zhang via Stack)
+  HBASE-1830 HbaseObjectWritable methods should allow null HBCs
+             for when Writable is not Configurable (Stack via jgray)
 IMPROVEMENTS
   HBASE-1760 Cleanup TODOs in HTable

View File

@@ -27,6 +27,7 @@ import java.util.Arrays;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.io.ObjectWritable;
 /**
@@ -126,17 +127,15 @@ public abstract class CompareFilter implements Filter {
     }
   }
-  private static final HBaseConfiguration hbc = new HBaseConfiguration();
   public void readFields(DataInput in) throws IOException {
     compareOp = CompareOp.valueOf(in.readUTF());
     comparator = (WritableByteArrayComparable)
-      ObjectWritable.readObject(in, hbc);
+      HbaseObjectWritable.readObject(in, null);
   }
   public void write(DataOutput out) throws IOException {
     out.writeUTF(compareOp.name());
-    ObjectWritable.writeObject(out, comparator,
-      WritableByteArrayComparable.class, hbc);
+    HbaseObjectWritable.writeObject(out, comparator,
+      WritableByteArrayComparable.class, null);
   }
 }

View File

@@ -34,8 +34,11 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RowLock;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.HbaseMapWritable;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -57,6 +60,14 @@ public class TestSerialization extends HBaseTestCase {
     super.tearDown();
   }
+  public void testCompareFilter() throws Exception {
+    Filter f = new RowFilter(CompareOp.EQUAL,
+      new BinaryComparator(Bytes.toBytes("testRowOne-2")));
+    byte [] bytes = Writables.getBytes(f);
+    Filter ff = (Filter)Writables.getWritable(bytes, new RowFilter());
+    assertNotNull(ff);
+  }
   public void testKeyValue() throws Exception {
     byte [] row = Bytes.toBytes(getName());
     byte [] family = Bytes.toBytes(getName());