speed up testcase if TESTS_NIGHTLY=false

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1140626 13f79535-47bb-0310-9956-ffa450edef68
Simon Willnauer 2011-06-28 13:59:24 +00:00
parent c7bd833255
commit 95028899b6
1 changed file with 12 additions and 16 deletions

@@ -50,12 +50,12 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
   public void testFlushByRam() throws CorruptIndexException,
       LockObtainFailedException, IOException, InterruptedException {
-    int[] numThreads = new int[] { 3 + random.nextInt(12), 1 };
+    int[] numThreads = new int[] { 2 + atLeast(1), 1 };
     for (int i = 0; i < numThreads.length; i++) {
-      runFlushByRam(numThreads[i],
-          1 + random.nextInt(10) + random.nextDouble(), false);
+      final double ramBuffer = (TEST_NIGHTLY ? 1 : 10) + atLeast(2) + random.nextDouble();
+      runFlushByRam(numThreads[i], ramBuffer
+          , false);
     }
     for (int i = 0; i < numThreads.length; i++) {
       // with a 256 mb ram buffer we should never stall
       runFlushByRam(numThreads[i], 256.d, true);
@@ -65,13 +65,13 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
   protected void runFlushByRam(int numThreads, double maxRamMB,
       boolean ensureNotStalled) throws IOException, CorruptIndexException,
       LockObtainFailedException, InterruptedException {
-    final int numDocumentsToIndex = 50 + random.nextInt(150);
+    final int numDocumentsToIndex = 10 + atLeast(30);
     AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
     Directory dir = newDirectory();
     MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
     IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
         new MockAnalyzer(random)).setFlushPolicy(flushPolicy);
-    final int numDWPT = 1 + random.nextInt(8);
+    final int numDWPT = 1 + atLeast(2);
     DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(
         numDWPT);
     iwc.setIndexerThreadPool(threadPool);
@@ -118,21 +118,21 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
   public void testFlushDocCount() throws CorruptIndexException,
       LockObtainFailedException, IOException, InterruptedException {
-    int[] numThreads = new int[] { 3 + random.nextInt(12), 1 };
+    int[] numThreads = new int[] { 2 + atLeast(1), 1 };
     for (int i = 0; i < numThreads.length; i++) {
-      final int numDocumentsToIndex = 50 + random.nextInt(150);
+      final int numDocumentsToIndex = 50 + atLeast(30);
       AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
       Directory dir = newDirectory();
       MockDefaultFlushPolicy flushPolicy = new MockDefaultFlushPolicy();
       IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
           new MockAnalyzer(random)).setFlushPolicy(flushPolicy);
-      final int numDWPT = 1 + random.nextInt(8);
+      final int numDWPT = 1 + atLeast(2);
       DocumentsWriterPerThreadPool threadPool = new ThreadAffinityDocumentsWriterThreadPool(
           numDWPT);
       iwc.setIndexerThreadPool(threadPool);
-      iwc.setMaxBufferedDocs(2 + random.nextInt(50));
+      iwc.setMaxBufferedDocs(2 + atLeast(10));
       iwc.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
       iwc.setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
       IndexWriter writer = new IndexWriter(dir, iwc);
@@ -170,7 +170,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
   public void testRandom() throws IOException, InterruptedException {
     final int numThreads = 1 + random.nextInt(8);
-    final int numDocumentsToIndex = 100 + random.nextInt(300);
+    final int numDocumentsToIndex = 50 + atLeast(70);
     AtomicInteger numDocs = new AtomicInteger(numDocumentsToIndex);
     Directory dir = newDirectory();
     IndexWriterConfig iwc = newIndexWriterConfig(TEST_VERSION_CURRENT,
@@ -317,11 +317,7 @@ public class TestFlushByRamOrCountsPolicy extends LuceneTestCase {
         ramSize = newRamSize;
       }
       if (doRandomCommit) {
-        int commit;
-        synchronized (random) {
-          commit = random.nextInt(20);
-        }
-        if (commit == 0) {
+        if (rarely()) {
           writer.commit();
         }
       }
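
The change leans on LuceneTestCase's randomized-test helpers: atLeast(n) yields a value of at least n that the framework scales up for nightly runs, and rarely() returns true only with low probability, so a default run (TESTS_NIGHTLY=false) indexes fewer documents, uses fewer indexing threads, and commits less often. The sketch below illustrates how such nightly-aware helpers can behave; the class name, the scaling factors, and the "tests.nightly" property handling are assumptions for illustration, not the actual LuceneTestCase implementation.

// Minimal sketch of nightly-aware randomization helpers, assuming a
// "tests.nightly" system property; LuceneTestCase's real code differs in detail.
import java.util.Random;

public class NightlyScalingSketch {
  // Assumed to mirror the TESTS_NIGHTLY switch used by the build.
  static final boolean TEST_NIGHTLY = Boolean.getBoolean("tests.nightly");
  static final Random random = new Random();

  // Returns a value >= i; nightly runs get a larger lower bound so they do more work.
  static int atLeast(int i) {
    final int multiplier = TEST_NIGHTLY ? 3 : 1; // assumed scaling factor
    final int min = i * multiplier;
    return min + random.nextInt(min); // somewhere in [min, 2*min)
  }

  // True only occasionally; nightly runs take the rare branch more often.
  static boolean rarely() {
    final int percent = TEST_NIGHTLY ? 10 : 2; // assumed probabilities
    return random.nextInt(100) < percent;
  }

  public static void main(String[] args) {
    // With TESTS_NIGHTLY=false the bounds stay near their minimum,
    // which is where the speedup in this commit comes from.
    System.out.println("numDocumentsToIndex = " + (50 + atLeast(30)));
    System.out.println("commit this round?  " + rarely());
  }
}

Under this scheme the replacements in the diff (for example 10 + atLeast(30) instead of 50 + random.nextInt(150), and rarely() instead of a hand-rolled one-in-twenty check) keep the default run small while nightly builds still exercise larger document counts, thread pools, and buffer sizes.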