diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java index c23c05c2c6e..d32165eb213 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.*; +import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -51,7 +51,6 @@ import org.apache.hadoop.hbase.io.hfile.TestHFileWriterV2; import org.apache.hadoop.hbase.regionserver.wal.HLog; import org.apache.hadoop.hbase.regionserver.wal.HLogFactory; import org.apache.hadoop.hbase.util.Bytes; - import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -62,7 +61,7 @@ import org.junit.runners.Parameterized.Parameters; /** * Tests {@link HFile} cache-on-write functionality for data blocks, non-root - * index blocks, and Bloom filter blocks, as specified by the column family. + * index blocks, and Bloom filter blocks, as specified by the column family.
*/ @RunWith(Parameterized.class) @Category(MediumTests.class) @@ -119,7 +118,9 @@ public class TestCacheOnWriteInSchema { private final CacheOnWriteType cowType; private Configuration conf; private final String testDescription; + private HRegion region; private HStore store; + private HLog hlog; private FileSystem fs; public TestCacheOnWriteInSchema(CacheOnWriteType cowType) { @@ -161,18 +162,35 @@ public class TestCacheOnWriteInSchema { fs.delete(logdir, true); HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false); - HLog hlog = HLogFactory.createHLog(fs, basedir, logName, conf); - - HRegion region = new HRegion(basedir, hlog, fs, conf, info, htd, null); + hlog = HLogFactory.createHLog(fs, basedir, logName, conf); + + region = new HRegion(basedir, hlog, fs, conf, info, htd, null); store = new HStore(basedir, region, hcd, fs, conf); } @After - public void tearDown() { + public void tearDown() throws IOException { + IOException ex = null; + try { + region.close(); + } catch (IOException e) { + LOG.warn("Caught Exception", e); + ex = e; + } + try { + hlog.closeAndDelete(); + } catch (IOException e) { + LOG.warn("Caught Exception", e); + ex = e; + } try { fs.delete(new Path(DIR), true); } catch (IOException e) { LOG.error("Could not delete " + DIR, e); + ex = e; + } + if (ex != null) { + throw ex; } } @@ -188,7 +206,7 @@ public class TestCacheOnWriteInSchema { } private void readStoreFile(Path path) throws IOException { - CacheConfig cacheConf = store.getCacheConfig(); + CacheConfig cacheConf = store.getCacheConfig(); BlockCache cache = cacheConf.getBlockCache(); StoreFile sf = new StoreFile(fs, path, conf, cacheConf, BloomType.ROWCOL, null); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java index 03b1acc51c8..4c60418e32e 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java @@ -25,21 +25,26 @@ import java.util.GregorianCalendar; import java.util.List; import junit.framework.TestCase; -import org.junit.experimental.categories.Category; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.SmallTests; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder; -import org.apache.hadoop.hbase.regionserver.compactions.*; +import org.apache.hadoop.hbase.regionserver.compactions.CompactionPolicy; import org.apache.hadoop.hbase.regionserver.wal.HLog; import org.apache.hadoop.hbase.regionserver.wal.HLogFactory; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; +import org.junit.After; +import org.junit.experimental.categories.Category; import com.google.common.collect.Lists; @@ -61,6 +66,8 @@ public class TestDefaultCompactSelection extends TestCase { protected static final long minSize = 10; protected static final long maxSize = 1000; + private HLog hlog; + private HRegion region; @Override public void setUp() throws Exception { @@ -87,9 +94,9 @@ public class TestDefaultCompactSelection extends TestCase { htd.addFamily(hcd); HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false); - HLog hlog = HLogFactory.createHLog(fs, basedir, + hlog = HLogFactory.createHLog(fs, 
basedir, logName, conf); - HRegion region = HRegion.createHRegion(info, basedir, conf, htd); + region = HRegion.createHRegion(info, basedir, conf, htd); HRegion.closeHRegion(region); Path tableDir = new Path(basedir, Bytes.toString(htd.getName())); region = new HRegion(tableDir, hlog, fs, conf, info, htd, null); @@ -101,6 +108,26 @@ public class TestDefaultCompactSelection extends TestCase { fs.create(TEST_FILE); } + @After + public void tearDown() throws IOException { + IOException ex = null; + try { + region.close(); + } catch (IOException e) { + LOG.warn("Caught Exception", e); + ex = e; + } + try { + hlog.closeAndDelete(); + } catch (IOException e) { + LOG.warn("Caught Exception", e); + ex = e; + } + if (ex != null) { + throw ex; + } + } + // used so our tests don't deal with actual StoreFiles static class MockStoreFile extends StoreFile { long length = 0;