HBASE-1646 Scan-s can't set a Filter

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@793431 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2009-07-12 23:21:26 +00:00
parent 67f428cc42
commit c493cd3bcc
3 changed files with 36 additions and 16 deletions
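
The change below replaces HbaseObjectWritable-based (de)serialization of a Scan's filter with an explicit scheme: write the filter's class name, then let the filter write its own fields. A minimal sketch of the round-trip this is meant to make work, modeled on the new test case at the bottom of this commit (the PrefixFilter and the row/prefix values are placeholders, not part of the patch):

    // Client side: attach a filter to a Scan and serialize it.
    Scan scan = new Scan(Bytes.toBytes("row"));
    scan.setFilter(new PrefixFilter(Bytes.toBytes("prefix")));
    byte [] bytes = Writables.getBytes(scan);

    // Server side: deserialize; the filter should survive the trip.
    Scan deserialized = (Scan)Writables.getWritable(bytes, new Scan());
    assert deserialized.getFilter() instanceof PrefixFilter;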

CHANGES.txt

@@ -257,6 +257,7 @@ Release 0.20.0 - Unreleased
    HBASE-1644 Result.row is cached in getRow; this breaks MapReduce
               (Doğacan Güney via Stack)
    HBASE-1639 clean checkout with empty hbase-site.xml, zk won't start
+   HBASE-1646 Scan-s can't set a Filter (Doğacan Güney via Stack)
 
 IMPROVEMENTS
    HBASE-1089 Add count of regions on filesystem to master UI; add percentage

src/java/org/apache/hadoop/hbase/client/Scan.java

@@ -25,10 +25,10 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableFactories;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -476,21 +476,31 @@ public class Scan implements Writable {
     return sb.toString();
   }
 
+  @SuppressWarnings("unchecked")
+  private Writable createForName(String className) {
+    try {
+      Class<? extends Writable> clazz =
+        (Class<? extends Writable>) Class.forName(className);
+      return WritableFactories.newInstance(clazz, new Configuration());
+    } catch (ClassNotFoundException e) {
+      throw new RuntimeException("Can't find class " + className);
+    }
+  }
+
   //Writable
   public void readFields(final DataInput in)
   throws IOException {
     this.startRow = Bytes.readByteArray(in);
     this.stopRow = Bytes.readByteArray(in);
     this.maxVersions = in.readInt();
-    boolean hasFilter = in.readBoolean();
-    if(hasFilter) {
-      this.filter = (Filter)HbaseObjectWritable.readObject(in,
-        new Configuration());
+    if(in.readBoolean()) {
+      this.filter = (Filter)createForName(Bytes.toString(Bytes.readByteArray(in)));
+      this.filter.readFields(in);
     }
-    boolean hasOldFilter = in.readBoolean();
-    if (hasOldFilter) {
-      this.oldFilter = (RowFilterInterface)HbaseObjectWritable.readObject(in,
-        new Configuration());
+    if (in.readBoolean()) {
+      this.oldFilter =
+        (RowFilterInterface)createForName(Bytes.toString(Bytes.readByteArray(in)));
+      this.oldFilter.readFields(in);
     }
     this.tr = new TimeRange();
     tr.readFields(in);
@@ -518,15 +528,15 @@
       out.writeBoolean(false);
     } else {
       out.writeBoolean(true);
-      HbaseObjectWritable.writeObject(out, this.filter,
-        Filter.class, null);
+      Bytes.writeByteArray(out, Bytes.toBytes(filter.getClass().getName()));
+      filter.write(out);
     }
     if (this.oldFilter == null) {
       out.writeBoolean(false);
     } else {
       out.writeBoolean(true);
-      HbaseObjectWritable.writeObject(out, this.oldFilter,
-        RowFilterInterface.class, null);
+      Bytes.writeByteArray(out, Bytes.toBytes(oldFilter.getClass().getName()));
+      oldFilter.write(out);
     }
     tr.write(out);
     out.writeInt(familyMap.size());
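
On the wire, each filter slot now reads as: a boolean present-flag, then the filter's class name as a length-prefixed byte array, then whatever the filter's own write() emitted. A condensed sketch of reading one such slot, mirroring the readFields change above (one assumption worth noting: the class must be instantiable by WritableFactories, in practice via a no-arg constructor, or the reflective path in createForName fails at deserialization time):

    if (in.readBoolean()) {                                        // present-flag
      String className = Bytes.toString(Bytes.readByteArray(in));  // class name
      Filter f = (Filter)createForName(className);                 // reflective instantiation
      f.readFields(in);                                            // filter restores its own state
    }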

src/test/org/apache/hadoop/hbase/TestSerialization.java

@@ -22,12 +22,11 @@ package org.apache.hadoop.hbase;
 
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.NavigableSet;
+import java.util.Set;
 
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
@@ -35,6 +34,8 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RowLock;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.io.BatchOperation;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
@@ -44,7 +45,6 @@ import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.Writable;
 
 /**
  * Test HBase Writables serializations
@@ -370,6 +370,15 @@ public class TestSerialization extends HBaseTestCase {
       for(byte[] column : set){
         assertTrue(desSet.contains(column));
       }
+
+      // Test filters are serialized properly.
+      scan = new Scan(startRow);
+      byte [] prefix = Bytes.toBytes(getName());
+      scan.setFilter(new PrefixFilter(prefix));
+      sb = Writables.getBytes(scan);
+      desScan = (Scan)Writables.getWritable(sb, new Scan());
+      Filter f = desScan.getFilter();
+      assertTrue(f instanceof PrefixFilter);
     }
 
     assertEquals(scan.getMaxVersions(), desScan.getMaxVersions());