HBASE-4847 Activate single jvm for small tests on jenkins

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1210643 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2011-12-05 21:20:04 +00:00
parent b955fa6c32
commit 7ba1306e5b
20 changed files with 227 additions and 97 deletions

View File

@ -307,11 +307,11 @@ echo "Small tests executed after $exeTime minutes"
if (test $parallelMaven -gt 0) if (test $parallelMaven -gt 0)
then then
echo "Running tests with two maven instances in parallel" echo "Running tests with two maven instances in parallel"
$mvnCommand -P nonParallelTests test -Dtest=$runList1 $args & $mvnCommand -P localTests test -Dtest=$runList1 $args &
#give some time to the first process if there is anything to compile #give some time to the first process if there is anything to compile
sleep 30 sleep 30
$mvnCommand -P nonParallelTests test -Dtest=$runList2 $args $mvnCommand -P localTests test -Dtest=$runList2 $args
#wait for forked process to finish #wait for forked process to finish
wait wait
@ -326,14 +326,14 @@ then
if (test $runAllTests -eq 1 && test ${#flakyTests} -gt 5) if (test $runAllTests -eq 1 && test ${#flakyTests} -gt 5)
then then
echo "Running flaky tests" echo "Running flaky tests"
$mvnCommand -P nonParallelTests test -Dtest=$flakyTests $args $mvnCommand -P localTests test -Dtest=$flakyTests $args
cleanProcess cleanProcess
exeTime=$(((`date +%s` - $startTime)/60)) exeTime=$(((`date +%s` - $startTime)/60))
echo "Flaky tests executed after $exeTime minutes" echo "Flaky tests executed after $exeTime minutes"
fi fi
else else
echo "Running tests with a single maven instance, no parallelization" echo "Running tests with a single maven instance, no parallelization"
$mvnCommand -P nonParallelTests test -Dtest=$runList1,$runList2,$flakyTests $args $mvnCommand -P localTests test -Dtest=$runList1,$runList2,$flakyTests $args
cleanProcess cleanProcess
exeTime=$(((`date +%s` - $startTime)/60)) exeTime=$(((`date +%s` - $startTime)/60))
echo "Single maven instance tests executed after $exeTime minutes" echo "Single maven instance tests executed after $exeTime minutes"
@ -420,7 +420,7 @@ then
if (test $replayFailed -gt 0) if (test $replayFailed -gt 0)
then then
echo "Replaying all tests that failed" echo "Replaying all tests that failed"
$mvnCommand -P nonParallelTests test -Dtest=$replayList $args $mvnCommand -P localTests test -Dtest=$replayList $args
echo "Replaying done" echo "Replaying done"
fi fi
fi fi

85
pom.xml
View File

@ -329,9 +329,7 @@
--> -->
<dependency> <dependency>
<groupId>org.apache.maven.surefire</groupId> <groupId>org.apache.maven.surefire</groupId>
<artifactId>surefire-junit47</artifactId> <artifactId>${surefire.provider}</artifactId>
<!-- Despite its name, junit47 supports 4.8 features
as categories -->
<version>${surefire.version}</version> <version>${surefire.version}</version>
</dependency> </dependency>
</dependencies> </dependencies>
@ -351,7 +349,23 @@
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId> <artifactId>maven-failsafe-plugin</artifactId>
<version>${surefire.version}</version> <version>${surefire.version}</version>
<dependencies>
<dependency>
<groupId>org.apache.maven.surefire</groupId>
<artifactId>${surefire.provider}</artifactId>
<version>${surefire.version}</version>
</dependency>
</dependencies>
<configuration> <configuration>
<includes>
<include>${integrationtest.include}</include>
</includes>
<excludes>
<exclude>${unittest.include}</exclude>
<exclude>**/*$*</exclude>
<exclude>${test.exclude.pattern}</exclude>
</excludes>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<environmentVariables> <environmentVariables>
<LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/nativelib</LD_LIBRARY_PATH> <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/nativelib</LD_LIBRARY_PATH>
<DYLD_LIBRARY_PATH>${env.DYLD_LIBRARY_PATH}:${project.build.directory}/nativelib</DYLD_LIBRARY_PATH> <DYLD_LIBRARY_PATH>${env.DYLD_LIBRARY_PATH}:${project.build.directory}/nativelib</DYLD_LIBRARY_PATH>
@ -605,13 +619,13 @@
<groupId>org.apache.maven.plugins</groupId> <groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId> <artifactId>maven-surefire-plugin</artifactId>
<configuration> <configuration>
<skip>${surefire.skipfirstPartTests}</skip> <skip>${surefire.skipFirstPart}</skip>
<forkMode>${surefire.firstPartForkMode}</forkMode> <forkMode>${surefire.firstPartForkMode}</forkMode>
<parallel>${surefire.firstPartParallel}</parallel> <parallel>${surefire.firstPartParallel}</parallel>
<perCoreThreadCount>false</perCoreThreadCount> <perCoreThreadCount>false</perCoreThreadCount>
<threadCount>${surefire.firstPartThreadCount}</threadCount> <threadCount>${surefire.firstPartThreadCount}</threadCount>
<groups>${surefire.firstPartGroups}</groups> <groups>${surefire.firstPartGroups}</groups>
<testFailureIgnore>${surefire.hasSecondPart}</testFailureIgnore> <testFailureIgnore>false</testFailureIgnore>
</configuration> </configuration>
<executions> <executions>
<execution> <execution>
@ -635,7 +649,6 @@
<configuration> <configuration>
<skip>false</skip> <skip>false</skip>
<forkMode>always</forkMode> <forkMode>always</forkMode>
<groups>org.apache.hadoop.hbase.LargeTests</groups>
</configuration> </configuration>
</plugin> </plugin>
<plugin> <plugin>
@ -873,21 +886,23 @@
<package.version>0.91.0</package.version> <package.version>0.91.0</package.version>
<final.name>${project.artifactId}-${project.version}</final.name> <final.name>${project.artifactId}-${project.version}</final.name>
<!-- Test inclusion patterns used by failsafe configuration -->
<unittest.include>**/Test*.java</unittest.include>
<integrationtest.include>**/IntegrationTest*.java</integrationtest.include>
<surefire.version>2.11-TRUNK-HBASE-2</surefire.version> <surefire.version>2.11-TRUNK-HBASE-2</surefire.version>
<!-- in the default env, we're configured for the worst case --> <surefire.provider>surefire-junit47</surefire.provider>
<surefire.firstPartForkMode>always</surefire.firstPartForkMode>
<!-- default: run small & medium -->
<surefire.skipFirstPart>false</surefire.skipFirstPart>
<surefire.skipSecondPart>false</surefire.skipSecondPart>
<surefire.firstPartForkMode>once</surefire.firstPartForkMode>
<surefire.firstPartParallel>none</surefire.firstPartParallel> <surefire.firstPartParallel>none</surefire.firstPartParallel>
<surefire.firstPartThreadCount>1</surefire.firstPartThreadCount> <surefire.firstPartThreadCount>1</surefire.firstPartThreadCount>
<!-- default will be to run small & medium <surefire.firstPartGroups>org.apache.hadoop.hbase.SmallTests</surefire.firstPartGroups>
Right now we run all tests as before. <surefire.secondPartGroups>org.apache.hadoop.hbase.MediumTests</surefire.secondPartGroups>
-->
<surefire.skipFirstPart>false</surefire.skipFirstPart>
<surefire.skipSecondPart>true</surefire.skipSecondPart>
<surefire.hasSecondPart>false</surefire.hasSecondPart>
<surefire.firstPartGroups></surefire.firstPartGroups>
<surefire.secondPartGroups></surefire.secondPartGroups>
</properties> </properties>
<!-- Sorted by groups of dependencies then groupId and artifactId --> <!-- Sorted by groups of dependencies then groupId and artifactId -->
@ -901,6 +916,7 @@
* javax.xml.stream:stax-api in favour of stax:stax-api * javax.xml.stream:stax-api in favour of stax:stax-api
--> -->
<!-- General dependencies --> <!-- General dependencies -->
<dependency> <dependency>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
@ -1886,6 +1902,10 @@
<surefire.firstPartForkMode>once</surefire.firstPartForkMode> <surefire.firstPartForkMode>once</surefire.firstPartForkMode>
<surefire.firstPartParallel>none</surefire.firstPartParallel> <surefire.firstPartParallel>none</surefire.firstPartParallel>
<surefire.firstPartThreadCount>1</surefire.firstPartThreadCount> <surefire.firstPartThreadCount>1</surefire.firstPartThreadCount>
<surefire.skipFirstPart>false</surefire.skipFirstPart>
<surefire.skipSecondPart>true</surefire.skipSecondPart>
<surefire.firstPartGroups></surefire.firstPartGroups>
</properties> </properties>
</profile> </profile>
@ -1912,6 +1932,7 @@
<activeByDefault>false</activeByDefault> <activeByDefault>false</activeByDefault>
</activation> </activation>
<properties> <properties>
<surefire.firstPartForkMode>always</surefire.firstPartForkMode>
<surefire.skipFirstPart>false</surefire.skipFirstPart> <surefire.skipFirstPart>false</surefire.skipFirstPart>
<surefire.skipSecondPart>true</surefire.skipSecondPart> <surefire.skipSecondPart>true</surefire.skipSecondPart>
<surefire.firstPartGroups>org.apache.hadoop.hbase.MediumTests</surefire.firstPartGroups> <surefire.firstPartGroups>org.apache.hadoop.hbase.MediumTests</surefire.firstPartGroups>
@ -1925,6 +1946,7 @@
<activeByDefault>false</activeByDefault> <activeByDefault>false</activeByDefault>
</activation> </activation>
<properties> <properties>
<surefire.firstPartForkMode>always</surefire.firstPartForkMode>
<surefire.skipFirstPart>false</surefire.skipFirstPart> <surefire.skipFirstPart>false</surefire.skipFirstPart>
<surefire.skipSecondPart>true</surefire.skipSecondPart> <surefire.skipSecondPart>true</surefire.skipSecondPart>
<surefire.firstPartGroups>org.apache.hadoop.hbase.LargeTests</surefire.firstPartGroups> <surefire.firstPartGroups>org.apache.hadoop.hbase.LargeTests</surefire.firstPartGroups>
@ -1944,7 +1966,6 @@
<surefire.skipFirstPart>false</surefire.skipFirstPart> <surefire.skipFirstPart>false</surefire.skipFirstPart>
<surefire.skipSecondPart>false</surefire.skipSecondPart> <surefire.skipSecondPart>false</surefire.skipSecondPart>
<surefire.hasSecondPart>true</surefire.hasSecondPart>
<surefire.firstPartGroups>org.apache.hadoop.hbase.SmallTests</surefire.firstPartGroups> <surefire.firstPartGroups>org.apache.hadoop.hbase.SmallTests</surefire.firstPartGroups>
<surefire.secondPartGroups>org.apache.hadoop.hbase.MediumTests</surefire.secondPartGroups> <surefire.secondPartGroups>org.apache.hadoop.hbase.MediumTests</surefire.secondPartGroups>
</properties> </properties>
@ -1962,11 +1983,37 @@
<surefire.skipFirstPart>false</surefire.skipFirstPart> <surefire.skipFirstPart>false</surefire.skipFirstPart>
<surefire.skipSecondPart>false</surefire.skipSecondPart> <surefire.skipSecondPart>false</surefire.skipSecondPart>
<surefire.hasSecondPart>true</surefire.hasSecondPart>
<surefire.firstPartGroups>org.apache.hadoop.hbase.SmallTests</surefire.firstPartGroups> <surefire.firstPartGroups>org.apache.hadoop.hbase.SmallTests</surefire.firstPartGroups>
<surefire.secondPartGroups>org.apache.hadoop.hbase.MediumTests,org.apache.hadoop.hbase.LargeTests</surefire.secondPartGroups> <surefire.secondPartGroups>org.apache.hadoop.hbase.MediumTests,org.apache.hadoop.hbase.LargeTests</surefire.secondPartGroups>
</properties> </properties>
</profile> </profile>
<profile> <!-- Use it to skip the surefire tests but not the failsafe tests -->
<id>skipSurefireTests</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<properties>
<surefire.skipFirstPart>true</surefire.skipFirstPart>
<surefire.skipSecondPart>true</surefire.skipSecondPart>
</properties>
</profile>
<profile> <!-- Use it to launch tests locally -->
<id>localTests</id>
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<properties>
<surefire.provider>surefire-junit4</surefire.provider>
<surefire.version>2.10</surefire.version>
<surefire.firstPartForkMode>always</surefire.firstPartForkMode>
<surefire.skipFirstPart>false</surefire.skipFirstPart>
<surefire.skipSecondPart>true</surefire.skipSecondPart>
<surefire.firstPartGroups></surefire.firstPartGroups>
</properties>
</profile>
</profiles> </profiles>
<!-- See http://jira.codehaus.org/browse/MSITE-443 why the settings need to be here and not in pluginManagement. --> <!-- See http://jira.codehaus.org/browse/MSITE-443 why the settings need to be here and not in pluginManagement. -->

View File

@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.*;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
/** /**

View File

@ -97,6 +97,9 @@ public class TestColumnPrefixFilter {
while(scanner.next(results)); while(scanner.next(results));
assertEquals(prefixMap.get(s).size(), results.size()); assertEquals(prefixMap.get(s).size(), results.size());
} }
region.close();
region.getLog().closeAndDelete();
} }
@Test @Test
@ -157,6 +160,9 @@ public class TestColumnPrefixFilter {
while(scanner.next(results)); while(scanner.next(results));
assertEquals(prefixMap.get(s).size(), results.size()); assertEquals(prefixMap.get(s).size(), results.size());
} }
region.close();
region.getLog().closeAndDelete();
} }
List<String> generateRandomWords(int numberOfWords, String suffix) { List<String> generateRandomWords(int numberOfWords, String suffix) {

View File

@ -83,6 +83,7 @@ public class TestDependentColumnFilter extends TestCase {
protected void tearDown() throws Exception { protected void tearDown() throws Exception {
super.tearDown(); super.tearDown();
this.region.close(); this.region.close();
region.getLog().closeAndDelete();
} }
private void addData() throws IOException { private void addData() throws IOException {

View File

@ -37,6 +37,7 @@ import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList.Operator; import org.apache.hadoop.hbase.filter.FilterList.Operator;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@ -163,7 +164,9 @@ public class TestFilter extends HBaseTestCase {
} }
protected void tearDown() throws Exception { protected void tearDown() throws Exception {
this.region.close(); HLog hlog = region.getLog();
region.close();
hlog.closeAndDelete();
super.tearDown(); super.tearDown();
} }

View File

@ -99,6 +99,9 @@ public class TestMultipleColumnPrefixFilter {
InternalScanner scanner = region.getScanner(scan); InternalScanner scanner = region.getScanner(scan);
while(scanner.next(results)); while(scanner.next(results));
assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size()); assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size());
region.close();
region.getLog().closeAndDelete();
} }
@Test @Test
@ -165,6 +168,9 @@ public class TestMultipleColumnPrefixFilter {
InternalScanner scanner = region.getScanner(scan); InternalScanner scanner = region.getScanner(scan);
while(scanner.next(results)); while(scanner.next(results));
assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size()); assertEquals(prefixMap.get("p").size() + prefixMap.get("q").size(), results.size());
region.close();
region.getLog().closeAndDelete();
} }
@Test @Test
@ -218,6 +224,9 @@ public class TestMultipleColumnPrefixFilter {
while(scanner2.next(results2)); while(scanner2.next(results2));
assertEquals(results1.size(), results2.size()); assertEquals(results1.size(), results2.size());
region.close();
region.getLog().closeAndDelete();
} }
List<String> generateRandomWords(int numberOfWords, String suffix) { List<String> generateRandomWords(int numberOfWords, String suffix) {

View File

@ -30,9 +30,9 @@ import java.net.InetSocketAddress;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.SmallTests;
import org.apache.hadoop.hbase.ipc.VersionedProtocol; import org.apache.hadoop.hbase.ipc.VersionedProtocol;
import org.apache.log4j.AppenderSkeleton; import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
@ -45,7 +45,7 @@ import org.junit.experimental.categories.Category;
* be delayed. Check that the last two, which are undelayed, return before the * be delayed. Check that the last two, which are undelayed, return before the
* first one. * first one.
*/ */
@Category(SmallTests.class) @Category(MediumTests.class) // Fails sometimes with small tests
public class TestDelayedRpc { public class TestDelayedRpc {
public static RpcServer rpcServer; public static RpcServer rpcServer;
@ -233,12 +233,16 @@ public class TestDelayedRpc {
@Override @Override
public void run() { public void run() {
try {
Integer result = new Integer(server.test(delay)); Integer result = new Integer(server.test(delay));
if (results != null) { if (results != null) {
synchronized (results) { synchronized (results) {
results.add(result); results.add(result);
} }
} }
} catch (Exception e) {
fail("Unexpected exception: "+e.getMessage());
}
} }
} }

View File

@ -157,6 +157,9 @@ public class TestColumnSeeking {
assertEquals(kvSet.size(), results.size()); assertEquals(kvSet.size(), results.size());
assertTrue(results.containsAll(kvSet)); assertTrue(results.containsAll(kvSet));
} }
region.close();
region.getLog().closeAndDelete();
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@ -263,6 +266,9 @@ public class TestColumnSeeking {
assertEquals(kvSet.size(), results.size()); assertEquals(kvSet.size(), results.size());
assertTrue(results.containsAll(kvSet)); assertTrue(results.containsAll(kvSet));
} }
region.close();
region.getLog().closeAndDelete();
} }
List<String> generateRandomWords(int numberOfWords, String suffix) { List<String> generateRandomWords(int numberOfWords, String suffix) {

View File

@ -1339,6 +1339,7 @@ public class TestHRegion extends HBaseTestCase {
for (int i = 0; i < subregions.length; i++) { for (int i = 0; i < subregions.length; i++) {
try { try {
subregions[i].close(); subregions[i].close();
subregions[i].getLog().closeAndDelete();
} catch (IOException e) { } catch (IOException e) {
// Ignore. // Ignore.
} }

View File

@ -133,6 +133,9 @@ public class TestKeepDeletes extends HBaseTestCase {
r = region.get(g, null); r = region.get(g, null);
checkResult(r, c0, c0, T1); checkResult(r, c0, c0, T1);
assertEquals(0, countDeleteMarkers(region)); assertEquals(0, countDeleteMarkers(region));
region.close();
region.getLog().closeAndDelete();
} }
/** /**
@ -180,6 +183,9 @@ public class TestKeepDeletes extends HBaseTestCase {
kvs = new ArrayList<KeyValue>(); kvs = new ArrayList<KeyValue>();
scan.next(kvs); scan.next(kvs);
assertTrue(kvs.isEmpty()); assertTrue(kvs.isEmpty());
region.close();
region.getLog().closeAndDelete();
} }
/** /**
@ -222,6 +228,9 @@ public class TestKeepDeletes extends HBaseTestCase {
region.compactStores(true); region.compactStores(true);
// major compaction deleted it // major compaction deleted it
assertEquals(0, countDeleteMarkers(region)); assertEquals(0, countDeleteMarkers(region));
region.close();
region.getLog().closeAndDelete();
} }
/** /**
@ -243,6 +252,9 @@ public class TestKeepDeletes extends HBaseTestCase {
} catch (DoNotRetryIOException dnre) { } catch (DoNotRetryIOException dnre) {
// ok! // ok!
} }
region.close();
region.getLog().closeAndDelete();
} }
/** /**
@ -287,6 +299,9 @@ public class TestKeepDeletes extends HBaseTestCase {
assertTrue(kvs.get(3).isDeleteType()); assertTrue(kvs.get(3).isDeleteType());
assertEquals(kvs.get(4).getValue(), T2); assertEquals(kvs.get(4).getValue(), T2);
assertEquals(kvs.get(5).getValue(), T1); assertEquals(kvs.get(5).getValue(), T1);
region.close();
region.getLog().closeAndDelete();
} }
/** /**
@ -327,6 +342,9 @@ public class TestKeepDeletes extends HBaseTestCase {
// major compaction removes all, since there are no puts they affect // major compaction removes all, since there are no puts they affect
region.compactStores(true); region.compactStores(true);
assertEquals(0, countDeleteMarkers(region)); assertEquals(0, countDeleteMarkers(region));
region.close();
region.getLog().closeAndDelete();
} }
/** /**
@ -387,6 +405,9 @@ public class TestKeepDeletes extends HBaseTestCase {
// so after this collection all markers // so after this collection all markers
region.compactStores(true); region.compactStores(true);
assertEquals(0, countDeleteMarkers(region)); assertEquals(0, countDeleteMarkers(region));
region.close();
region.getLog().closeAndDelete();
} }
/** /**
@ -464,6 +485,9 @@ public class TestKeepDeletes extends HBaseTestCase {
checkGet(region, T2, c0, c1, ts+3); checkGet(region, T2, c0, c1, ts+3);
checkGet(region, T2, c1, c0, ts+3, T2, T1); checkGet(region, T2, c1, c0, ts+3, T2, T1);
checkGet(region, T2, c1, c1, ts+3, T2, T1); checkGet(region, T2, c1, c1, ts+3, T2, T1);
region.close();
region.getLog().closeAndDelete();
} }
/** /**
@ -556,6 +580,9 @@ public class TestKeepDeletes extends HBaseTestCase {
region.compactStores(true); region.compactStores(true);
region.compactStores(true); region.compactStores(true);
assertEquals(1, countDeleteMarkers(region)); assertEquals(1, countDeleteMarkers(region));
region.close();
region.getLog().closeAndDelete();
} }
/** /**
@ -604,6 +631,9 @@ public class TestKeepDeletes extends HBaseTestCase {
scanner.next(kvs); scanner.next(kvs);
assertEquals(4, kvs.size()); assertEquals(4, kvs.size());
scanner.close(); scanner.close();
region.close();
region.getLog().closeAndDelete();
} }
/** /**
@ -679,6 +709,9 @@ public class TestKeepDeletes extends HBaseTestCase {
// so after the next compaction the last family delete marker is also gone // so after the next compaction the last family delete marker is also gone
region.compactStores(true); region.compactStores(true);
assertEquals(0, countDeleteMarkers(region)); assertEquals(0, countDeleteMarkers(region));
region.close();
region.getLog().closeAndDelete();
} }
private void checkGet(HRegion region, byte[] row, byte[] fam, byte[] col, private void checkGet(HRegion region, byte[] row, byte[] fam, byte[] col,

View File

@ -308,6 +308,7 @@ public class TestMultiColumnScanner {
LOG.info("Number of row/col pairs deleted at least once: " + LOG.info("Number of row/col pairs deleted at least once: " +
lastDelTimeMap.size()); lastDelTimeMap.size());
region.close(); region.close();
region.getLog().closeAndDelete();
} }
static HRegion createRegion(String tableName, static HRegion createRegion(String tableName,

View File

@ -94,5 +94,7 @@ public class TestResettingCounters {
assertTrue(Bytes.equals(kvs[i].getQualifier(), qualifiers[i])); assertTrue(Bytes.equals(kvs[i].getQualifier(), qualifiers[i]));
assertEquals(6, Bytes.toLong(kvs[i].getValue())); assertEquals(6, Bytes.toLong(kvs[i].getValue()));
} }
region.close();
region.getLog().closeAndDelete();
} }
} }

View File

@ -104,6 +104,7 @@ public class TestScanWithBloomError {
scanColSet(new int[]{1, 4, 6, 7}, new int[]{1, 6, 7}); scanColSet(new int[]{1, 4, 6, 7}, new int[]{1, 6, 7});
region.close(); region.close();
region.getLog().closeAndDelete();
} }
private void scanColSet(int[] colSet, int[] expectedResultCols) private void scanColSet(int[] colSet, int[] expectedResultCols)

View File

@ -427,6 +427,7 @@ public class TestSeekOptimizations {
public void tearDown() throws IOException { public void tearDown() throws IOException {
if (region != null) { if (region != null) {
region.close(); region.close();
region.getLog().closeAndDelete();
} }
// We have to re-set the lazy seek flag back to the default so that other // We have to re-set the lazy seek flag back to the default so that other

View File

@ -200,6 +200,7 @@ public class TestSplitTransaction {
daughtersRowCount += count; daughtersRowCount += count;
} finally { } finally {
openRegion.close(); openRegion.close();
openRegion.getLog().closeAndDelete();
} }
} }
assertEquals(rowcount, daughtersRowCount); assertEquals(rowcount, daughtersRowCount);
@ -255,6 +256,7 @@ public class TestSplitTransaction {
daughtersRowCount += count; daughtersRowCount += count;
} finally { } finally {
openRegion.close(); openRegion.close();
openRegion.getLog().closeAndDelete();
} }
} }
assertEquals(rowcount, daughtersRowCount); assertEquals(rowcount, daughtersRowCount);

View File

@ -388,6 +388,12 @@ public class TestStore extends TestCase {
assertEquals(oldValue, Bytes.toLong(results.get(1).getValue())); assertEquals(oldValue, Bytes.toLong(results.get(1).getValue()));
} }
@Override
protected void tearDown() throws Exception {
super.tearDown();
EnvironmentEdgeManagerTestHelper.reset();
}
public void testICV_negMemstoreSize() throws IOException { public void testICV_negMemstoreSize() throws IOException {
init(this.getName()); init(this.getName());

View File

@ -41,7 +41,8 @@ import java.util.NavigableSet;
import java.util.TreeSet; import java.util.TreeSet;
import static org.apache.hadoop.hbase.regionserver.KeyValueScanFixture.scanFixture; import static org.apache.hadoop.hbase.regionserver.KeyValueScanFixture.scanFixture;
@Category(SmallTests.class) // Can't be small as it plays with EnvironmentEdgeManager
@Category(MediumTests.class)
public class TestStoreScanner extends TestCase { public class TestStoreScanner extends TestCase {
private static final String CF_STR = "cf"; private static final String CF_STR = "cf";
final byte [] CF = Bytes.toBytes(CF_STR); final byte [] CF = Bytes.toBytes(CF_STR);
@ -502,6 +503,7 @@ public class TestStoreScanner extends TestCase {
} }
public void testDeleteMarkerLongevity() throws Exception { public void testDeleteMarkerLongevity() throws Exception {
try {
final long now = System.currentTimeMillis(); final long now = System.currentTimeMillis();
EnvironmentEdgeManagerTestHelper.injectEdge(new EnvironmentEdge() { EnvironmentEdgeManagerTestHelper.injectEdge(new EnvironmentEdge() {
public long currentTimeMillis() { public long currentTimeMillis() {
@ -566,5 +568,8 @@ public class TestStoreScanner extends TestCase {
assertEquals(kvs[14], results.get(5)); assertEquals(kvs[14], results.get(5));
assertEquals(kvs[15], results.get(6)); assertEquals(kvs[15], results.get(6));
assertEquals(7, results.size()); assertEquals(7, results.size());
}finally{
EnvironmentEdgeManagerTestHelper.reset();
}
} }
} }

View File

@ -36,7 +36,8 @@ import org.junit.experimental.categories.Category;
/** /**
* Tests for {@link FSTableDescriptors}. * Tests for {@link FSTableDescriptors}.
*/ */
@Category(SmallTests.class) // Does not support being executed in the same JVM as other tests
@Category(MediumTests.class)
public class TestFSTableDescriptors { public class TestFSTableDescriptors {
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final Log LOG = LogFactory.getLog(TestFSTableDescriptors.class); private static final Log LOG = LogFactory.getLog(TestFSTableDescriptors.class);

View File

@ -20,6 +20,7 @@
package org.apache.hadoop.hbase.util; package org.apache.hadoop.hbase.util;
import org.apache.hadoop.hbase.MediumTests; import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@ -29,7 +30,7 @@ import static junit.framework.Assert.assertEquals;
* Tests that the incrementing environment edge increments time instead of using * Tests that the incrementing environment edge increments time instead of using
* the default. * the default.
*/ */
@Category(MediumTests.class) @Category(SmallTests.class)
public class TestIncrementingEnvironmentEdge { public class TestIncrementingEnvironmentEdge {
@Test @Test