HBASE-5817 Fix uncategorized tests

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1327691 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2012-04-18 21:31:00 +00:00
parent a0a2674c42
commit 56a4420af1
13 changed files with 67 additions and 4 deletions

View File

@ -338,7 +338,10 @@ report: "[INFO] Tests are skipped". It's harmless.
</para> </para>
<para>Running <programlisting>mvn test -P runLargeTests</programlisting> executes large tests in a single JVM. <para>Running <programlisting>mvn test -P runLargeTests</programlisting> executes large tests in a single JVM.
</para> </para>
</section>
<section xml:id="hbase.unittests.cmds.test.hbasetests">
<title><command>hbasetests.sh</command></title>
<para>It's also possible to use the script <command>hbasetests.sh</command>. This script runs the medium and <para>It's also possible to use the script <command>hbasetests.sh</command>. This script runs the medium and
large tests in parallel with two maven instances, and provides a single report. large tests in parallel with two maven instances, and provides a single report.
It must be executed from the directory which contains the <filename>pom.xml</filename>.</para> It must be executed from the directory which contains the <filename>pom.xml</filename>.</para>

View File

@ -34,7 +34,7 @@ import java.util.Set;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
@ -50,6 +50,7 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.wal.HLog; import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Writable;
import org.junit.AfterClass; import org.junit.AfterClass;
@ -64,7 +65,7 @@ import com.sun.org.apache.commons.logging.LogFactory;
* Verifies ProcessRowEndpoint works. * Verifies ProcessRowEndpoint works.
* The tested RowProcessor performs two scans and a read-modify-write. * The tested RowProcessor performs two scans and a read-modify-write.
*/ */
@Category(SmallTests.class) @Category(MediumTests.class)
public class TestRowProcessorEndpoint { public class TestRowProcessorEndpoint {
static final Log LOG = LogFactory.getLog(TestRowProcessorEndpoint.class); static final Log LOG = LogFactory.getLog(TestRowProcessorEndpoint.class);

View File

@ -34,6 +34,7 @@ import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.ipc.VersionedProtocol; import org.apache.hadoop.hbase.ipc.VersionedProtocol;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.log4j.AppenderSkeleton; import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent; import org.apache.log4j.spi.LoggingEvent;

View File

@ -27,12 +27,16 @@ import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Writable;
import org.apache.hadoop.hbase.MediumTests;
import org.junit.Test; import org.junit.Test;
import com.google.protobuf.DescriptorProtos; import com.google.protobuf.DescriptorProtos;
import com.google.protobuf.DescriptorProtos.EnumDescriptorProto; import com.google.protobuf.DescriptorProtos.EnumDescriptorProto;
import org.junit.experimental.categories.Category;
/** Unit tests to test PB-based types on WritableRpcEngine. */ /** Unit tests to test PB-based types on WritableRpcEngine. */
@Category(MediumTests.class)
public class TestPBOnWritableRpc { public class TestPBOnWritableRpc {
private static Configuration conf = new Configuration(); private static Configuration conf = new Configuration();
@ -124,4 +128,8 @@ public class TestPBOnWritableRpc {
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {
new TestPBOnWritableRpc().testCallsInternal(conf); new TestPBOnWritableRpc().testCallsInternal(conf);
} }
@org.junit.Rule
public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
} }

View File

@ -61,6 +61,7 @@ import org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper;
import org.apache.hadoop.hbase.zookeeper.ZKAssign; import org.apache.hadoop.hbase.zookeeper.ZKAssign;
import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.Watcher; import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.KeeperException.NodeExistsException; import org.apache.zookeeper.KeeperException.NodeExistsException;
@ -79,7 +80,7 @@ import com.google.protobuf.ServiceException;
/** /**
* Test {@link AssignmentManager} * Test {@link AssignmentManager}
*/ */
@Category(SmallTests.class) @Category(MediumTests.class)
public class TestAssignmentManager { public class TestAssignmentManager {
private static final HBaseTestingUtility HTU = new HBaseTestingUtility(); private static final HBaseTestingUtility HTU = new HBaseTestingUtility();
private static final ServerName SERVERNAME_A = private static final ServerName SERVERNAME_A =

View File

@ -26,10 +26,13 @@ import junit.framework.Assert;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HServerLoad; import org.apache.hadoop.hbase.HServerLoad;
import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.MediumTests;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestMXBean { public class TestMXBean {
private static final HBaseTestingUtility TEST_UTIL = private static final HBaseTestingUtility TEST_UTIL =
@ -92,4 +95,7 @@ public class TestMXBean {
} }
@org.junit.Rule
public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
} }

View File

@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.zookeeper.RootRegionTracker; import org.apache.hadoop.hbase.zookeeper.RootRegionTracker;
import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.io.MapWritable; import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException;
@ -52,6 +53,7 @@ import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import org.mockito.Mockito; import org.mockito.Mockito;
import org.junit.experimental.categories.Category;
/** /**
* Standup the master and fake it to test various aspects of master function. * Standup the master and fake it to test various aspects of master function.
@ -61,6 +63,7 @@ import org.mockito.Mockito;
* TODO: Speed up the zk connection by Master. It pauses 5 seconds establishing * TODO: Speed up the zk connection by Master. It pauses 5 seconds establishing
* session. * session.
*/ */
@Category(MediumTests.class)
public class TestMasterNoCluster { public class TestMasterNoCluster {
private static final HBaseTestingUtility TESTUTIL = new HBaseTestingUtility(); private static final HBaseTestingUtility TESTUTIL = new HBaseTestingUtility();
@ -326,4 +329,8 @@ public class TestMasterNoCluster {
master.join(); master.join();
} }
} }
@org.junit.Rule
public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
} }

View File

@ -23,8 +23,11 @@ import java.util.List;
import junit.framework.Assert; import junit.framework.Assert;
import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestExactCounterMetric { public class TestExactCounterMetric {
@Test @Test
@ -44,4 +47,8 @@ public class TestExactCounterMetric {
i--; i--;
} }
} }
@org.junit.Rule
public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
} }

View File

@ -22,8 +22,11 @@ import junit.framework.Assert;
import org.apache.hadoop.hbase.metrics.histogram.ExponentiallyDecayingSample; import org.apache.hadoop.hbase.metrics.histogram.ExponentiallyDecayingSample;
import org.apache.hadoop.hbase.metrics.histogram.Snapshot; import org.apache.hadoop.hbase.metrics.histogram.Snapshot;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestExponentiallyDecayingSample { public class TestExponentiallyDecayingSample {
@Test @Test
@ -60,4 +63,8 @@ public class TestExponentiallyDecayingSample {
Assert.assertTrue(i >= 0.0 && i < 1000.0); Assert.assertTrue(i >= 0.0 && i < 1000.0);
} }
} }
@org.junit.Rule
public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
} }

View File

@ -23,9 +23,12 @@ import java.util.Random;
import org.apache.hadoop.hbase.metrics.histogram.MetricsHistogram; import org.apache.hadoop.hbase.metrics.histogram.MetricsHistogram;
import org.apache.hadoop.hbase.metrics.histogram.Snapshot; import org.apache.hadoop.hbase.metrics.histogram.Snapshot;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(SmallTests.class)
public class TestMetricsHistogram { public class TestMetricsHistogram {
@Test @Test
@ -95,4 +98,8 @@ public class TestMetricsHistogram {
&& s.get95thPercentile() <= maxAcceptableninetyFifth); && s.get95thPercentile() <= maxAcceptableninetyFifth);
} }
@org.junit.Rule
public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
} }

View File

@ -21,10 +21,13 @@ import junit.framework.Assert;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.MediumTests;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(MediumTests.class)
public class TestMXBean { public class TestMXBean {
private static final HBaseTestingUtility TEST_UTIL = private static final HBaseTestingUtility TEST_UTIL =
@ -55,4 +58,8 @@ public class TestMXBean {
Assert.assertEquals(rs.getZooKeeperWatcher().getQuorum(), Assert.assertEquals(rs.getZooKeeperWatcher().getQuorum(),
info.getZookeeperQuorum()); info.getZookeeperQuorum());
} }
@org.junit.Rule
public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
} }

View File

@ -49,11 +49,14 @@ import org.apache.hadoop.hbase.master.TestMasterFailover;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread; import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
import org.apache.hadoop.hbase.util.Threads; import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category;
@Category(LargeTests.class)
public class TestRSKilledWhenMasterInitializing { public class TestRSKilledWhenMasterInitializing {
private static final Log LOG = LogFactory.getLog(TestMasterFailover.class); private static final Log LOG = LogFactory.getLog(TestMasterFailover.class);
@ -257,5 +260,4 @@ public class TestRSKilledWhenMasterInitializing {
@org.junit.Rule @org.junit.Rule
public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu = public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
new org.apache.hadoop.hbase.ResourceCheckerJUnitRule(); new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
} }

View File

@ -29,11 +29,14 @@ import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.MediumTests;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category;
/** /**
* A basic unit test that spins up a local HBase cluster. * A basic unit test that spins up a local HBase cluster.
*/ */
@Category(MediumTests.class)
public class TestProcessBasedCluster { public class TestProcessBasedCluster {
private static final Log LOG = LogFactory.getLog(TestProcessBasedCluster.class); private static final Log LOG = LogFactory.getLog(TestProcessBasedCluster.class);
@ -86,4 +89,7 @@ public class TestProcessBasedCluster {
assertTrue(pom.getPath() + " does not exist", pom.exists()); assertTrue(pom.getPath() + " does not exist", pom.exists());
} }
@org.junit.Rule
public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
} }