HBASE-6829. [WINDOWS] Tests should ensure that HLog is closed

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1437647 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Enis Soztutar 2013-01-23 19:37:15 +00:00
parent 73d6cbb4cf
commit 669626b15b
2 changed files with 59 additions and 14 deletions

View File

@@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.regionserver; package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.*; import static org.junit.Assert.assertTrue;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@@ -51,7 +51,6 @@ import org.apache.hadoop.hbase.io.hfile.TestHFileWriterV2;
import org.apache.hadoop.hbase.regionserver.wal.HLog; import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory; import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@@ -119,7 +118,9 @@ public class TestCacheOnWriteInSchema {
private final CacheOnWriteType cowType; private final CacheOnWriteType cowType;
private Configuration conf; private Configuration conf;
private final String testDescription; private final String testDescription;
private HRegion region;
private HStore store; private HStore store;
private HLog hlog;
private FileSystem fs; private FileSystem fs;
public TestCacheOnWriteInSchema(CacheOnWriteType cowType) { public TestCacheOnWriteInSchema(CacheOnWriteType cowType) {
@@ -161,18 +162,35 @@ public class TestCacheOnWriteInSchema {
fs.delete(logdir, true); fs.delete(logdir, true);
HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false); HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
HLog hlog = HLogFactory.createHLog(fs, basedir, logName, conf); hlog = HLogFactory.createHLog(fs, basedir, logName, conf);
HRegion region = new HRegion(basedir, hlog, fs, conf, info, htd, null); region = new HRegion(basedir, hlog, fs, conf, info, htd, null);
store = new HStore(basedir, region, hcd, fs, conf); store = new HStore(basedir, region, hcd, fs, conf);
} }
@After @After
public void tearDown() { public void tearDown() throws IOException {
IOException ex = null;
try {
region.close();
} catch (IOException e) {
LOG.warn("Caught Exception", e);
ex = e;
}
try {
hlog.closeAndDelete();
} catch (IOException e) {
LOG.warn("Caught Exception", e);
ex = e;
}
try { try {
fs.delete(new Path(DIR), true); fs.delete(new Path(DIR), true);
} catch (IOException e) { } catch (IOException e) {
LOG.error("Could not delete " + DIR, e); LOG.error("Could not delete " + DIR, e);
ex = e;
}
if (ex != null) {
throw ex;
} }
} }

View File

@@ -25,21 +25,26 @@ import java.util.GregorianCalendar;
import java.util.List; import java.util.List;
import junit.framework.TestCase; import junit.framework.TestCase;
import org.junit.experimental.categories.Category;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder; import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
import org.apache.hadoop.hbase.regionserver.compactions.*; import org.apache.hadoop.hbase.regionserver.compactions.CompactionPolicy;
import org.apache.hadoop.hbase.regionserver.wal.HLog; import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogFactory; import org.apache.hadoop.hbase.regionserver.wal.HLogFactory;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.junit.After;
import org.junit.experimental.categories.Category;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
@@ -61,6 +66,8 @@ public class TestDefaultCompactSelection extends TestCase {
protected static final long minSize = 10; protected static final long minSize = 10;
protected static final long maxSize = 1000; protected static final long maxSize = 1000;
private HLog hlog;
private HRegion region;
@Override @Override
public void setUp() throws Exception { public void setUp() throws Exception {
@@ -87,9 +94,9 @@ public class TestDefaultCompactSelection extends TestCase {
htd.addFamily(hcd); htd.addFamily(hcd);
HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false); HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
HLog hlog = HLogFactory.createHLog(fs, basedir, hlog = HLogFactory.createHLog(fs, basedir,
logName, conf); logName, conf);
HRegion region = HRegion.createHRegion(info, basedir, conf, htd); region = HRegion.createHRegion(info, basedir, conf, htd);
HRegion.closeHRegion(region); HRegion.closeHRegion(region);
Path tableDir = new Path(basedir, Bytes.toString(htd.getName())); Path tableDir = new Path(basedir, Bytes.toString(htd.getName()));
region = new HRegion(tableDir, hlog, fs, conf, info, htd, null); region = new HRegion(tableDir, hlog, fs, conf, info, htd, null);
@@ -101,6 +108,26 @@ public class TestDefaultCompactSelection extends TestCase {
fs.create(TEST_FILE); fs.create(TEST_FILE);
} }
@After
public void tearDown() throws IOException {
IOException ex = null;
try {
region.close();
} catch (IOException e) {
LOG.warn("Caught Exception", e);
ex = e;
}
try {
hlog.closeAndDelete();
} catch (IOException e) {
LOG.warn("Caught Exception", e);
ex = e;
}
if (ex != null) {
throw ex;
}
}
// used so our tests don't deal with actual StoreFiles // used so our tests don't deal with actual StoreFiles
static class MockStoreFile extends StoreFile { static class MockStoreFile extends StoreFile {
long length = 0; long length = 0;