HBASE-14637 Loosen TestChoreService assert AND have TestDataBlockEncoders do less work (and add timeouts)

This commit is contained in:
stack 2015-10-17 16:14:45 -07:00
parent d9ee191318
commit 71b38d60bb
2 changed files with 16 additions and 6 deletions

View File

@ -38,8 +38,6 @@ import org.junit.experimental.categories.Category;
@Category(SmallTests.class) @Category(SmallTests.class)
public class TestChoreService { public class TestChoreService {
private static final Log LOG = LogFactory.getLog(TestChoreService.class);
/** /**
* A few ScheduledChore samples that are useful for testing with ChoreService * A few ScheduledChore samples that are useful for testing with ChoreService
*/ */
@ -373,7 +371,7 @@ public class TestChoreService {
final int period = 100; final int period = 100;
final int delta = 5; final int delta = 5;
ChoreService service = ChoreService.getInstance("testForceTrigger"); ChoreService service = ChoreService.getInstance("testForceTrigger");
CountingChore chore = new CountingChore("countingChore", period); final CountingChore chore = new CountingChore("countingChore", period);
try { try {
service.scheduleChore(chore); service.scheduleChore(chore);
Thread.sleep(10 * period + delta); Thread.sleep(10 * period + delta);
@ -393,11 +391,12 @@ public class TestChoreService {
chore.triggerNow(); chore.triggerNow();
Thread.sleep(delta); Thread.sleep(delta);
assertTrue(chore.getCountOfChoreCalls() == 16); assertTrue("" + chore.getCountOfChoreCalls(), chore.getCountOfChoreCalls() == 16);
Thread.sleep(10 * period + delta); Thread.sleep(10 * period + delta);
assertTrue(chore.getCountOfChoreCalls() == 26); // Be loosey-goosey. It used to be '26' but it was a bit flaky relying on timing.
assertTrue("" + chore.getCountOfChoreCalls(), chore.getCountOfChoreCalls() > 16);
} finally { } finally {
shutdownService(service); shutdownService(service);
} }

View File

@ -30,6 +30,7 @@ import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Random; import java.util.Random;
import org.apache.hadoop.hbase.CategoryBasedTimeout;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CellUtil;
@ -49,11 +50,14 @@ import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.test.RedundantKVGenerator; import org.apache.hadoop.hbase.util.test.RedundantKVGenerator;
import org.junit.Rule;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
import org.junit.rules.TestRule;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
import org.junit.runners.Parameterized; import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters; import org.junit.runners.Parameterized.Parameters;
import org.mortbay.log.Log;
/** /**
* Test all of the data block encoding algorithms for correctness. Most of the * Test all of the data block encoding algorithms for correctness. Most of the
@ -62,9 +66,11 @@ import org.junit.runners.Parameterized.Parameters;
@Category({IOTests.class, LargeTests.class}) @Category({IOTests.class, LargeTests.class})
@RunWith(Parameterized.class) @RunWith(Parameterized.class)
public class TestDataBlockEncoders { public class TestDataBlockEncoders {
@Rule public final TestRule timeout = CategoryBasedTimeout.builder().
withTimeout(this.getClass()).withLookingForStuckThread(true).build();
private static int NUMBER_OF_KV = 10000; private static int NUMBER_OF_KV = 10000;
private static int NUM_RANDOM_SEEKS = 10000; private static int NUM_RANDOM_SEEKS = 1000;
private static int ENCODED_DATA_OFFSET = HConstants.HFILEBLOCK_HEADER_SIZE private static int ENCODED_DATA_OFFSET = HConstants.HFILEBLOCK_HEADER_SIZE
+ DataBlockEncoding.ID_SIZE; + DataBlockEncoding.ID_SIZE;
@ -182,6 +188,7 @@ public class TestDataBlockEncoders {
List<DataBlockEncoder.EncodedSeeker> encodedSeekers = List<DataBlockEncoder.EncodedSeeker> encodedSeekers =
new ArrayList<DataBlockEncoder.EncodedSeeker>(); new ArrayList<DataBlockEncoder.EncodedSeeker>();
for (DataBlockEncoding encoding : DataBlockEncoding.values()) { for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
Log.info("Encoding: " + encoding);
// Off heap block data support not added for PREFIX_TREE DBE yet. // Off heap block data support not added for PREFIX_TREE DBE yet.
// TODO remove this once support is added. HBASE-12298 // TODO remove this once support is added. HBASE-12298
if (this.useOffheapData && encoding == DataBlockEncoding.PREFIX_TREE) continue; if (this.useOffheapData && encoding == DataBlockEncoding.PREFIX_TREE) continue;
@ -189,6 +196,7 @@ public class TestDataBlockEncoders {
if (encoder == null) { if (encoder == null) {
continue; continue;
} }
Log.info("Encoder: " + encoder);
ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv, ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv,
getEncodingContext(Compression.Algorithm.NONE, encoding), this.useOffheapData); getEncodingContext(Compression.Algorithm.NONE, encoding), this.useOffheapData);
HFileContext meta = new HFileContextBuilder() HFileContext meta = new HFileContextBuilder()
@ -202,6 +210,7 @@ public class TestDataBlockEncoders {
seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer)); seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
encodedSeekers.add(seeker); encodedSeekers.add(seeker);
} }
Log.info("Testing it!");
// test it! // test it!
// try a few random seeks // try a few random seeks
for (boolean seekBefore : new boolean[] { false, true }) { for (boolean seekBefore : new boolean[] { false, true }) {
@ -219,6 +228,7 @@ public class TestDataBlockEncoders {
} }
// check edge cases // check edge cases
Log.info("Checking edge cases");
checkSeekingConsistency(encodedSeekers, false, sampleKv.get(0)); checkSeekingConsistency(encodedSeekers, false, sampleKv.get(0));
for (boolean seekBefore : new boolean[] { false, true }) { for (boolean seekBefore : new boolean[] { false, true }) {
checkSeekingConsistency(encodedSeekers, seekBefore, sampleKv.get(sampleKv.size() - 1)); checkSeekingConsistency(encodedSeekers, seekBefore, sampleKv.get(sampleKv.size() - 1));
@ -226,6 +236,7 @@ public class TestDataBlockEncoders {
Cell lastMidKv =CellUtil.createLastOnRowCol(midKv); Cell lastMidKv =CellUtil.createLastOnRowCol(midKv);
checkSeekingConsistency(encodedSeekers, seekBefore, lastMidKv); checkSeekingConsistency(encodedSeekers, seekBefore, lastMidKv);
} }
Log.info("Done");
} }
static ByteBuffer encodeKeyValues(DataBlockEncoding encoding, List<KeyValue> kvs, static ByteBuffer encodeKeyValues(DataBlockEncoding encoding, List<KeyValue> kvs,