From a6249ee3b4251f5416217269c80e201057fbda88 Mon Sep 17 00:00:00 2001
From: Zhihong Yu
Date: Thu, 22 Sep 2011 21:55:00 +0000
Subject: [PATCH] HBASE-4449  LoadIncrementalHFiles should be able to handle
 CFs with blooms (David Revell)

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1174403 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                                  |  2 +
 .../mapreduce/TestLoadIncrementalHFiles.java | 37 ++++++++++++++++---
 2 files changed, 34 insertions(+), 5 deletions(-)

diff --git a/CHANGES.txt b/CHANGES.txt
index d12e0d496ed..6f2a15be505 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -295,6 +295,8 @@ Release 0.91.0 - Unreleased
   TESTS
    HBASE-4450  test for number of blocks read: to serve as baseline for
                expected blocks read and for catching regressions (Kannan)
+   HBASE-4449  LoadIncrementalHFiles should be able to handle CFs with blooms
+               (David Revell)
   IMPROVEMENTS
    HBASE-3290  Max Compaction Size (Nicolas Spiegelberg via Stack)
    HBASE-3292  Expose block cache hit/miss/evict counts into region server
diff --git a/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index caed95d7064..01f06e18916 100644
--- a/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
+import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Test;
 
@@ -70,7 +71,7 @@ public class TestLoadIncrementalHFiles {
    */
   @Test
   public void testSimpleLoad() throws Exception {
-    runTest("testSimpleLoad",
+    runTest("testSimpleLoad", BloomType.NONE,
         new byte[][][] {
           new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") },
           new byte[][]{ Bytes.toBytes("ddd"), Bytes.toBytes("ooo") },
@@ -83,15 +84,39 @@ public class TestLoadIncrementalHFiles {
    */
   @Test
   public void testRegionCrossingLoad() throws Exception {
-    runTest("testRegionCrossingLoad",
+    runTest("testRegionCrossingLoad", BloomType.NONE,
         new byte[][][] {
           new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
           new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
     });
   }
 
-  private void runTest(String testName, byte[][][] hfileRanges)
-  throws Exception {
+  /**
+   * Test loading into a column family that has a ROW bloom filter.
+   */
+  @Test
+  public void testRegionCrossingRowBloom() throws Exception {
+    runTest("testRegionCrossingLoadRowBloom", BloomType.ROW,
+        new byte[][][] {
+          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
+          new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
+    });
+  }
+
+  /**
+   * Test loading into a column family that has a ROWCOL bloom filter.
+   */
+  @Test
+  public void testRegionCrossingRowColBloom() throws Exception {
+    runTest("testRegionCrossingLoadRowColBloom", BloomType.ROWCOL,
+        new byte[][][] {
+          new byte[][]{ Bytes.toBytes("aaaa"), Bytes.toBytes("eee") },
+          new byte[][]{ Bytes.toBytes("fff"), Bytes.toBytes("zzz") },
+    });
+  }
+
+  private void runTest(String testName, BloomType bloomType,
+      byte[][][] hfileRanges) throws Exception {
     Path dir = HBaseTestingUtility.getTestDir(testName);
     FileSystem fs = util.getTestFileSystem();
     dir = dir.makeQualified(fs);
@@ -111,7 +136,9 @@ public class TestLoadIncrementalHFiles {
     try {
       HBaseAdmin admin = new HBaseAdmin(util.getConfiguration());
       HTableDescriptor htd = new HTableDescriptor(TABLE);
-      htd.addFamily(new HColumnDescriptor(FAMILY));
+      HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
+      familyDesc.setBloomFilterType(bloomType);
+      htd.addFamily(familyDesc);
       admin.createTable(htd, SPLIT_KEYS);
 
       HTable table = new HTable(util.getConfiguration(), TABLE);