diff --git a/CHANGES.txt b/CHANGES.txt
index 92136071899..1ebb6f493cd 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -257,6 +257,7 @@ Release 0.20.0 - Unreleased
    HBASE-1644 Result.row is cached in getRow; this breaks MapReduce
               (Doğacan Güney via Stack)
    HBASE-1639 clean checkout with empty hbase-site.xml, zk won't start
+   HBASE-1646 Scan-s can't set a Filter (Doğacan Güney via Stack)
 
   IMPROVEMENTS
    HBASE-1089 Add count of regions on filesystem to master UI; add percentage
diff --git a/src/java/org/apache/hadoop/hbase/client/Scan.java b/src/java/org/apache/hadoop/hbase/client/Scan.java
index baa82d6d2e2..d4972ad36e7 100644
--- a/src/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/src/java/org/apache/hadoop/hbase/client/Scan.java
@@ -25,10 +25,10 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableFactories;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -476,21 +476,31 @@ public class Scan implements Writable {
     return sb.toString();
   }
 
+  @SuppressWarnings("unchecked")
+  private Writable createForName(String className) {
+    try {
+      Class<? extends Writable> clazz =
+        (Class<? extends Writable>) Class.forName(className);
+      return WritableFactories.newInstance(clazz, new Configuration());
+    } catch (ClassNotFoundException e) {
+      throw new RuntimeException("Can't find class " + className);
+    }
+  }
+
   //Writable
   public void readFields(final DataInput in)
   throws IOException {
     this.startRow = Bytes.readByteArray(in);
     this.stopRow = Bytes.readByteArray(in);
     this.maxVersions = in.readInt();
-    boolean hasFilter = in.readBoolean();
-    if(hasFilter) {
-      this.filter = (Filter)HbaseObjectWritable.readObject(in,
-        new Configuration());
+    if(in.readBoolean()) {
+      this.filter = (Filter)createForName(Bytes.toString(Bytes.readByteArray(in)));
+      this.filter.readFields(in);
     }
-    boolean hasOldFilter = in.readBoolean();
-    if (hasOldFilter) {
-      this.oldFilter = (RowFilterInterface)HbaseObjectWritable.readObject(in,
-        new Configuration());
+    if (in.readBoolean()) {
+      this.oldFilter =
+        (RowFilterInterface)createForName(Bytes.toString(Bytes.readByteArray(in)));
+      this.oldFilter.readFields(in);
     }
     this.tr = new TimeRange();
     tr.readFields(in);
@@ -518,15 +528,15 @@
       out.writeBoolean(false);
     } else {
       out.writeBoolean(true);
-      HbaseObjectWritable.writeObject(out, this.filter,
-        Filter.class, null);
+      Bytes.writeByteArray(out, Bytes.toBytes(filter.getClass().getName()));
+      filter.write(out);
     }
     if (this.oldFilter == null) {
       out.writeBoolean(false);
     } else {
       out.writeBoolean(true);
-      HbaseObjectWritable.writeObject(out, this.oldFilter,
-        RowFilterInterface.class, null);
+      Bytes.writeByteArray(out, Bytes.toBytes(oldFilter.getClass().getName()));
+      oldFilter.write(out);
     }
     tr.write(out);
     out.writeInt(familyMap.size());
diff --git a/src/test/org/apache/hadoop/hbase/TestSerialization.java b/src/test/org/apache/hadoop/hbase/TestSerialization.java
index 556f1bf1847..2fbc5142f7b 100644
--- a/src/test/org/apache/hadoop/hbase/TestSerialization.java
+++ b/src/test/org/apache/hadoop/hbase/TestSerialization.java
@@ -22,12 +22,11 @@ package org.apache.hadoop.hbase;
 
 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.NavigableSet;
+import java.util.Set;
 
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
@@ -35,6 +34,8 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.RowLock;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
 import org.apache.hadoop.hbase.io.BatchOperation;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
@@ -44,7 +45,6 @@ import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.DataInputBuffer;
-import org.apache.hadoop.io.Writable;
 
 /**
  * Test HBase Writables serializations
@@ -370,6 +370,15 @@ public class TestSerialization extends HBaseTestCase {
       for(byte[] column : set){
         assertTrue(desSet.contains(column));
       }
+
+      // Test filters are serialized properly.
+      scan = new Scan(startRow);
+      byte [] prefix = Bytes.toBytes(getName());
+      scan.setFilter(new PrefixFilter(prefix));
+      sb = Writables.getBytes(scan);
+      desScan = (Scan)Writables.getWritable(sb, new Scan());
+      Filter f = desScan.getFilter();
+      assertTrue(f instanceof PrefixFilter);
     }
 
     assertEquals(scan.getMaxVersions(), desScan.getMaxVersions());
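
Usage note: below is a minimal sketch of the round trip this patch enables, mirroring the new TestSerialization case above. It uses only the client API already shown in the diff (Scan#setFilter, PrefixFilter, Writables.getBytes/getWritable); the wrapper class name and the row/prefix values are illustrative, not part of the patch.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;

// Illustrative wrapper; not part of the patch.
public class ScanFilterRoundTrip {
  public static void main(String[] args) throws Exception {
    // Build a Scan carrying a filter, as a client would.
    Scan scan = new Scan(Bytes.toBytes("row-0"));
    scan.setFilter(new PrefixFilter(Bytes.toBytes("row-")));

    // Serialize the Scan; with this patch the filter is written as its
    // class name followed by its own Writable fields ...
    byte [] bytes = Writables.getBytes(scan);
    // ... and deserialize it into a fresh Scan, as the test above does.
    Scan deserialized = (Scan) Writables.getWritable(bytes, new Scan());

    // The deserialized Scan should still carry the PrefixFilter.
    Filter f = deserialized.getFilter();
    System.out.println(f instanceof PrefixFilter);  // expected: true
  }
}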