LUCENE-1844: speed up the unit tests

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@885214 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael McCandless 2009-11-29 11:31:12 +00:00
parent aaff6efa73
commit 7d6afb94a0
25 changed files with 198 additions and 186 deletions

View File

@ -45,6 +45,11 @@ Optimizations
Build
Test Cases
* LUCENE-1844: Speed up the unit tests (Mark Miller, Erick Erickson,
Mike McCandless)
======================= Release 3.0.0 2009-11-25 =======================
Changes in backwards compatibility policy

View File

@ -96,7 +96,6 @@ public class PerfRunData {
System.out.println("------------> queries:");
System.out.println(getQueryMaker(new SearchTask(this)).printQueries());
}
}
// clean old stuff, reopen

View File

@ -17,7 +17,6 @@
package org.apache.lucene.benchmark.byTask;
import java.io.IOException;
import java.io.StringReader;
import java.io.File;
import java.io.FileReader;
@ -25,9 +24,6 @@ import java.io.BufferedReader;
import java.util.List;
import java.util.Iterator;
import org.apache.lucene.benchmark.byTask.feeds.DocData;
import org.apache.lucene.benchmark.byTask.feeds.NoMoreDataException;
import org.apache.lucene.benchmark.byTask.feeds.ReutersContentSource;
import org.apache.lucene.benchmark.byTask.feeds.ReutersQueryMaker;
import org.apache.lucene.benchmark.byTask.tasks.CountingSearchTestTask;
import org.apache.lucene.benchmark.byTask.tasks.CountingHighlighterTestTask;
@ -42,13 +38,12 @@ import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.store.Directory;
import org.apache.lucene.search.FieldCache.StringIndex;
import org.apache.lucene.search.FieldCache;
import junit.framework.TestCase;
import org.apache.lucene.util.LuceneTestCase;
/**
* Test very simply that perf tasks - simple algorithms - are doing what they should.
*/
public class TestPerfTasksLogic extends TestCase {
public class TestPerfTasksLogic extends LuceneTestCase {
private static final boolean DEBUG = false;
static final String NEW_LINE = System.getProperty("line.separator");
@ -106,13 +101,14 @@ public class TestPerfTasksLogic extends TestCase {
*/
public void testTimedSearchTask() throws Exception {
String algLines[] = {
"log.step=100000",
"ResetSystemErase",
"CreateIndex",
"{ AddDoc } : 1000",
"{ AddDoc } : 100",
"Optimize",
"CloseIndex",
"OpenReader",
"{ CountingSearchTest } : 1.5s",
"{ CountingSearchTest } : .5s",
"CloseReader",
};
@ -126,6 +122,7 @@ public class TestPerfTasksLogic extends TestCase {
public void testBGSearchTaskThreads() throws Exception {
String algLines[] = {
"log.time.step.msec = 100",
"log.step=100000",
"ResetSystemErase",
"CreateIndex",
"{ AddDoc } : 1000",
@ -134,7 +131,7 @@ public class TestPerfTasksLogic extends TestCase {
"OpenReader",
"{",
" [ \"XSearch\" { CountingSearchTest > : * ] : 2 &-1",
" Wait(1.0)",
" Wait(0.5)",
"}",
"CloseReader",
"RepSumByPref X"
@ -149,11 +146,12 @@ public class TestPerfTasksLogic extends TestCase {
// 1. alg definition (required in every "logic" test)
String algLines[] = {
"doc.stored=true",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"query.maker=" + ReutersQueryMaker.class.getName(),
"ResetSystemErase",
"CreateIndex",
"{ AddDoc } : 1000",
"{ AddDoc } : 100",
"Optimize",
"CloseIndex",
"OpenReader(true)",
@ -168,7 +166,7 @@ public class TestPerfTasksLogic extends TestCase {
Benchmark benchmark = execBenchmark(algLines);
// 4. test specific checks after the benchmark run completed.
assertEquals("TestSearchTask was supposed to be called!",147,CountingHighlighterTestTask.numDocsRetrieved);
assertEquals("TestSearchTask was supposed to be called!",92,CountingHighlighterTestTask.numDocsRetrieved);
//pretty hard to figure out a priori how many docs are going to have highlighted fragments returned, but we can never have more than the number of docs
//we probably should use a different doc/query maker, but...
assertTrue("TestSearchTask was supposed to be called!", CountingHighlighterTestTask.numDocsRetrieved >= CountingHighlighterTestTask.numHighlightedResults && CountingHighlighterTestTask.numHighlightedResults > 0);
@ -178,7 +176,7 @@ public class TestPerfTasksLogic extends TestCase {
IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false, IndexWriter.MaxFieldLength.LIMITED);
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
assertEquals("100 docs were added to the index, this is what we expect to find!",100,ir.numDocs());
ir.close();
}
@ -187,7 +185,8 @@ public class TestPerfTasksLogic extends TestCase {
String algLines[] = {
"doc.stored=true",//doc storage is required in order to have text to highlight
"doc.term.vector.offsets=true",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"query.maker=" + ReutersQueryMaker.class.getName(),
"ResetSystemErase",
"CreateIndex",
@ -206,7 +205,7 @@ public class TestPerfTasksLogic extends TestCase {
Benchmark benchmark = execBenchmark(algLines);
// 4. test specific checks after the benchmark run completed.
assertEquals("TestSearchTask was supposed to be called!",147,CountingHighlighterTestTask.numDocsRetrieved);
assertEquals("TestSearchTask was supposed to be called!",92,CountingHighlighterTestTask.numDocsRetrieved);
//pretty hard to figure out a priori how many docs are going to have highlighted fragments returned, but we can never have more than the number of docs
//we probably should use a different doc/query maker, but...
assertTrue("TestSearchTask was supposed to be called!", CountingHighlighterTestTask.numDocsRetrieved >= CountingHighlighterTestTask.numHighlightedResults && CountingHighlighterTestTask.numHighlightedResults > 0);
@ -224,7 +223,8 @@ public class TestPerfTasksLogic extends TestCase {
// 1. alg definition (required in every "logic" test)
String algLines[] = {
"doc.stored=false",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"query.maker=" + ReutersQueryMaker.class.getName(),
"ResetSystemErase",
"CreateIndex",
@ -308,7 +308,7 @@ public class TestPerfTasksLogic extends TestCase {
"doc.index.props=true",
"# ----- alg ",
"CreateIndex",
"[ { AddDoc > : 2500 ] : 4",
"[ { AddDoc > : 250 ] : 4",
"CloseIndex",
};
@ -321,8 +321,8 @@ public class TestPerfTasksLogic extends TestCase {
IndexReader r = IndexReader.open(benchmark.getRunData().getDirectory(), true);
StringIndex idx = FieldCache.DEFAULT.getStringIndex(r, "country");
final int maxDoc = r.maxDoc();
assertEquals(10000, maxDoc);
for(int i=0;i<10000;i++) {
assertEquals(1000, maxDoc);
for(int i=0;i<1000;i++) {
assertNotNull("doc " + i + " has null country", idx.lookup[idx.order[i]]);
}
r.close();
@ -335,7 +335,8 @@ public class TestPerfTasksLogic extends TestCase {
// 1. alg definition (required in every "logic" test)
String algLines[] = {
"# ----- properties ",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=3",
"doc.term.vector=false",
"content.source.forever=false",
@ -353,7 +354,7 @@ public class TestPerfTasksLogic extends TestCase {
// 3. test number of docs in the index
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
int ndocsExpected = 20; // Reuters20ContentSource exhausts after 20 docs.
int ndocsExpected = 20; // first 20 reuters docs.
assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
ir.close();
}
@ -365,13 +366,13 @@ public class TestPerfTasksLogic extends TestCase {
File lineFile = new File(System.getProperty("tempDir"), "test.reuters.lines.txt");
// We will call WriteLineDocs this many times
final int NUM_TRY_DOCS = 500;
final int NUM_TRY_DOCS = 50;
// Creates a line file with first 500 docs from reuters
// Creates a line file with first 50 docs from SingleDocSource
String algLines1[] = {
"# ----- properties ",
"content.source=org.apache.lucene.benchmark.byTask.feeds.ReutersContentSource",
"content.source.forever=false",
"content.source=org.apache.lucene.benchmark.byTask.feeds.SingleDocSource",
"content.source.forever=true",
"line.file.out=" + lineFile.getAbsolutePath().replace('\\', '/'),
"# ----- alg ",
"{WriteLineDoc()}:" + NUM_TRY_DOCS,
@ -380,15 +381,12 @@ public class TestPerfTasksLogic extends TestCase {
// Run algo
Benchmark benchmark = execBenchmark(algLines1);
// Verify we got somewhere between 1-500 lines (some
// Reuters docs have no body, which WriteLineDoc task
// skips).
BufferedReader r = new BufferedReader(new FileReader(lineFile));
int numLines = 0;
while(r.readLine() != null)
numLines++;
r.close();
assertTrue("did not see the right number of docs; should be > 0 and <= " + NUM_TRY_DOCS + " but was " + numLines, numLines > 0 && numLines <= NUM_TRY_DOCS);
assertEquals("did not see the right number of docs; should be " + NUM_TRY_DOCS + " but was " + numLines, NUM_TRY_DOCS, numLines);
// Index the line docs
String algLines2[] = {
@ -426,14 +424,15 @@ public class TestPerfTasksLogic extends TestCase {
public void testReadTokens() throws Exception {
// We will call ReadTokens on this many docs
final int NUM_DOCS = 100;
final int NUM_DOCS = 20;
// Read tokens from first NUM_DOCS docs from Reuters and
// then build index from the same docs
String algLines1[] = {
"# ----- properties ",
"analyzer=org.apache.lucene.analysis.WhitespaceAnalyzer",
"content.source=org.apache.lucene.benchmark.byTask.feeds.ReutersContentSource",
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"# ----- alg ",
"{ReadTokens}: " + NUM_DOCS,
"ResetSystemErase",
@ -481,7 +480,8 @@ public class TestPerfTasksLogic extends TestCase {
// 1. alg definition (required in every "logic" test)
String algLines[] = {
"# ----- properties ",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=3",
"doc.term.vector=false",
"content.source.forever=false",
@ -502,7 +502,7 @@ public class TestPerfTasksLogic extends TestCase {
// 3. test number of docs in the index
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
int ndocsExpected = 2 * 20; // Reuters20ContentSource exhausts after 20 docs.
int ndocsExpected = 2 * 20; // first 20 reuters docs.
assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
ir.close();
}
@ -536,27 +536,6 @@ public class TestPerfTasksLogic extends TestCase {
System.out.println(txt);
}
/** use reuters and the exhaust mechanism, but to be faster, add 20 docs only... */
public static class Reuters20ContentSource extends ReutersContentSource {
private int nDocs = 0;
@Override
public synchronized DocData getNextDocData(DocData docData)
throws NoMoreDataException, IOException {
if (nDocs >= 20 && !forever) {
throw new NoMoreDataException();
}
nDocs++;
return super.getNextDocData(docData);
}
@Override
public synchronized void resetInputs() throws IOException {
super.resetInputs();
nDocs = 0;
}
}
/**
* Test that exhaust in loop works as expected (LUCENE-1115).
*/
@ -564,7 +543,8 @@ public class TestPerfTasksLogic extends TestCase {
// 1. alg definition (required in every "logic" test)
String algLines[] = {
"# ----- properties ",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=3",
"doc.term.vector=false",
"content.source.forever=false",
@ -586,7 +566,7 @@ public class TestPerfTasksLogic extends TestCase {
// 3. test number of docs in the index
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
int ndocsExpected = 20; // Reuters20ContentSource exhausts after 20 docs.
int ndocsExpected = 20; // first 20 reuters docs.
assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
ir.close();
}
@ -598,7 +578,8 @@ public class TestPerfTasksLogic extends TestCase {
// 1. alg definition (required in every "logic" test)
String algLines[] = {
"# ----- properties ",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"ram.flush.mb=-1",
"max.buffered=2",
"content.source.log.step=3",
@ -622,7 +603,7 @@ public class TestPerfTasksLogic extends TestCase {
// 3. test number of docs in the index
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
int ndocsExpected = 20; // Reuters20ContentSource exhausts after 20 docs.
int ndocsExpected = 20; // first 20 reuters docs.
assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
ir.close();
}
@ -642,7 +623,8 @@ public class TestPerfTasksLogic extends TestCase {
// 1. alg definition (required in every "logic" test)
String algLines[] = {
"# ----- properties ",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=3",
"doc.term.vector=false",
"content.source.forever=false",
@ -666,7 +648,7 @@ public class TestPerfTasksLogic extends TestCase {
// 3. test number of docs in the index
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
int ndocsExpected = 20; // Reuters20ContentSource exhausts after 20 docs.
int ndocsExpected = 20; // first 20 reuters docs.
assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
ir.close();
}
@ -685,7 +667,8 @@ public class TestPerfTasksLogic extends TestCase {
// 1. alg definition (required in every "logic" test)
String algLines[] = {
"# ----- properties ",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=3",
"ram.flush.mb=-1",
"max.buffered=2",
@ -711,7 +694,7 @@ public class TestPerfTasksLogic extends TestCase {
// 3. test number of docs in the index
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
int ndocsExpected = 20; // Reuters20ContentSource exhausts after 20 docs.
int ndocsExpected = 20; // first 20 reuters docs.
assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
ir.close();
}
@ -723,7 +706,8 @@ public class TestPerfTasksLogic extends TestCase {
// 1. alg definition (required in every "logic" test)
String algLines[] = {
"# ----- properties ",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=3",
"ram.flush.mb=-1",
"max.buffered=2",
@ -767,7 +751,8 @@ public class TestPerfTasksLogic extends TestCase {
// 1. alg definition (required in every "logic" test)
String algLines[] = {
"# ----- properties ",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=3",
"ram.flush.mb=-1",
"max.buffered=3",
@ -793,7 +778,7 @@ public class TestPerfTasksLogic extends TestCase {
// 3. test number of docs in the index
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
int ndocsExpected = 20; // Reuters20ContentSource exhausts after 20 docs.
int ndocsExpected = 20; // first 20 reuters docs.
assertEquals("wrong number of docs in the index!", ndocsExpected, ir.numDocs());
ir.close();
@ -845,7 +830,8 @@ public class TestPerfTasksLogic extends TestCase {
String dis = disable ? "-" : "";
return new String[] {
"# ----- properties ",
"content.source="+Reuters20ContentSource.class.getName(),
"content.source=org.apache.lucene.benchmark.byTask.feeds.LineDocSource",
"docs.file=" + getReuters20LinesFile(),
"content.source.log.step=30",
"doc.term.vector=false",
"content.source.forever=false",
@ -863,5 +849,8 @@ public class TestPerfTasksLogic extends TestCase {
"RepSumByName",
};
}
private static String getReuters20LinesFile() {
return System.getProperty("lucene.common.dir") + "/contrib/benchmark/src/test/org/apache/lucene/benchmark/reuters.first20.lines.txt";
}
}
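
The recurring change in this file is the content source: the inline Reuters20ContentSource subclass (deleted above) is replaced by the stock LineDocSource reading the 20-line reuters.first20.lines.txt file added below, via the new getReuters20LinesFile() helper, so the tests no longer unpack and parse the full Reuters corpus. LineDocSource expects one document per line with title, date and body separated by tabs, the format WriteLineDocTask produces. A minimal standalone sketch of reading that format (the LineDocPeek class name is illustrative, not part of the commit):

import java.io.BufferedReader;
import java.io.FileReader;

public class LineDocPeek {
  public static void main(String[] args) throws Exception {
    BufferedReader in = new BufferedReader(new FileReader(args[0]));
    int n = 0;
    for (String line; (line = in.readLine()) != null; ) {
      // title, date, body -- WriteLineDocTask's tab-separated layout
      String[] f = line.split("\t", 3);
      System.out.println(++n + ". " + f[0] + " (" + f[1] + ")");
    }
    in.close();
  }
}

Run against the line file added below, this should print the 20 Reuters titles and dates.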

View File

@ -0,0 +1,20 @@
BAHIA COCOA REVIEW 19870226200101 Showers continued throughout the week in the Bahia cocoa zone, alleviating the drought since early January and improving prospects for the coming temporao, although normal humidity levels have not been restored, Comissaria Smith said in its weekly review. The dry period means the temporao will be late this year. Arrivals for the week ended February 22 were 155,221 bags of 60 kilos making a cumulative total for the season of 5.93 mln against 5.81 at the same stage last year. Again it seems that cocoa delivered earlier on consignment was included in the arrivals figures. Comissaria Smith said there is still some doubt as to how much old crop cocoa is still available as harvesting has practically come to an end. With total Bahia crop estimates around 6.4 mln bags and sales standing at almost 6.2 mln there are a few hundred thousand bags still in the hands of farmers, middlemen, exporters and processors. There are doubts as to how much of this cocoa would be fit for export as shippers are now experiencing dificulties in obtaining +Bahia superior+ certificates. In view of the lower quality over recent weeks farmers have sold a good part of their cocoa held on consignment. Comissaria Smith said spot bean prices rose to 340 to 350 cruzados per arroba of 15 kilos. Bean shippers were reluctant to offer nearby shipment and only limited sales were booked for March shipment at 1,750 to 1,780 dlrs per tonne to ports to be named. New crop sales were also light and all to open ports with June/July going at 1,850 and 1,880 dlrs and at 35 and 45 dlrs under New York july, Aug/Sept at 1,870, 1,875 and 1,880 dlrs per tonne FOB. Routine sales of butter were made. March/April sold at 4,340, 4,345 and 4,350 dlrs. April/May butter went at 2.27 times New York May, June/July at 4,400 and 4,415 dlrs, Aug/Sept at 4,351 to 4,450 dlrs and at 2.27 and 2.28 times New York Sept and Oct/Dec at 4,480 dlrs and 2.27 times New York Dec, Comissaria Smith said. Destinations were the U.S., Covertible currency areas, Uruguay and open ports. Cake sales were registered at 785 to 995 dlrs for March/April, 785 dlrs for May, 753 dlrs for Aug and 0.39 times New York Dec for Oct/Dec. Buyers were the U.S., Argentina, Uruguay and convertible currency areas. Liquor sales were limited with March/April selling at 2,325 and 2,380 dlrs, June/July at 2,375 dlrs and at 1.25 times New York July, Aug/Sept at 2,400 dlrs and at 1.25 times New York Sept and Oct/Dec at 1.25 times New York Dec, Comissaria Smith said. Total Bahia sales are currently estimated at 6.13 mln bags against the 1986/87 crop and 1.06 mln bags against the 1987/88 crop. Final figures for the period to February 28 are expected to be published by the Brazilian Cocoa Trade Commission after carnival which ends midday on February 27. Reuter &#3;
STANDARD OIL <SRD> TO FORM FINANCIAL UNIT 19870226200220 Standard Oil Co and BP North America Inc said they plan to form a venture to manage the money market borrowing and investment activities of both companies. BP North America is a subsidiary of British Petroleum Co Plc <BP>, which also owns a 55 pct interest in Standard Oil. The venture will be called BP/Standard Financial Trading and will be operated by Standard Oil under the oversight of a joint management committee. Reuter &#3;
COBANCO INC <CBCO> YEAR NET 19870226201859 Shr 34 cts vs 1.19 dlrs Net 807,000 vs 2,858,000 Assets 510.2 mln vs 479.7 mln Deposits 472.3 mln vs 440.3 mln Loans 299.2 mln vs 327.2 mln Note: 4th qtr not available. Year includes 1985 extraordinary gain from tax carry forward of 132,000 dlrs, or five cts per shr. Reuter &#3;
WORLD MARKET PRICE FOR UPLAND COTTON - USDA 19870226213846 The U.S. Agriculture Department announced the prevailing world market price, adjusted to U.S. quality and location, for Strict Low Middling, 1-1/16 inch upland cotton at 52.69 cts per lb, to be in effect through midnight March 5. The adjusted world price is at average U.S. producing locations (near Lubbock, Texas) and will be further adjusted for other qualities and locations. The price will be used in determining First Handler Cotton Certificate payment rates. Based on data for the week ended February 26, the adjusted world price for upland cotton is determined as follows, in cts per lb -- Northern European Price 66.32 Adjustments -- Average U.S. spot mkt location 10.42 SLM 1-1/16 inch cotton 1.80 Average U.S. location 0.53 Sum of adjustments 12.75 Adjusted world price 53.57 Reuter &#3;
SUGAR QUOTA IMPORTS DETAILED -- USDA 19870226213854 The U.S. Agriculture Department said cumulative sugar imports from individual countries during the 1987 quota year, which began January 1, 1987 and ends December 31, 1987 were as follows, with quota allocations for the quota year in short tons, raw value -- CUMULATIVE QUOTA 1987 IMPORTS ALLOCATIONS ARGENTINA nil 39,130 AUSTRALIA nil 75,530 BARBADOS nil 7,500 BELIZE nil 10,010 BOLIVIA nil 7,500 BRAZIL nil 131,950 CANADA nil 18,876 QUOTA 1987 IMPORTS ALLOCATIONS COLOMBIA 103 21,840 CONGO nil 7,599 COSTA RICA nil 17,583 IVORY COAST nil 7,500 DOM REP 5,848 160,160 ECUADOR nil 10,010 EL SALVADOR nil 26,019.8 FIJI nil 25,190 GABON nil 7,500 QUOTA 1987 IMPORTS ALLOCATIONS GUATEMALA nil 43,680 GUYANA nil 10,920 HAITI nil 7,500 HONDURAS nil 15,917.2 INDIA nil 7,500 JAMAICA nil 10,010 MADAGASCAR nil 7,500 MALAWI nil 9,,100 QUOTA 1987 IMPORTS ALLOCATIONS MAURITIUS nil 10,920 MEXICO 37 7,500 MOZAMBIQUE nil 11,830 PANAMA nil 26,390 PAPUA NEW GUINEA nil 7,500 PARAGUAY nil 7,500 PERU nil 37,310 PHILIPPINES nil 143,780 ST.CHRISTOPHER- NEVIS nil 7,500 QUOTA 1987 IMPORTS ALLOCATIONS SWAZILAND nil 14,560 TAIWAN nil 10,920 THAILAND nil 12,740 TRINIDAD-TOBAGO nil 7,500 URUGUAY nil 7,500 ZIMBABWE nil 10,920 Reuter &#3;
GRAIN SHIPS LOADING AT PORTLAND 19870226213903 There were seven grain ships loading and six ships were waiting to load at Portland, according to the Portland Merchants Exchange. Reuter &#3;
IRAN ANNOUNCES END OF MAJOR OFFENSIVE IN GULF WAR 19870226214000 Iran announced tonight that its major offensive against Iraq in the Gulf war had ended after dealing savage blows against the Baghdad government. The Iranian news agency IRNA, in a report received in London, said the operation code-named Karbala-5 launched into Iraq on January 9 was now over. It quoted a joint statewment by the Iranian Army and Revolutionary Guards Corps as saying that their forces had "dealt one of the severest blows on the Iraqi war machine in the history of the Iraq-imposed war." The statement by the Iranian High Command appeared to herald the close of an assault on the port city of Basra in southern Iraq. "The operation was launched at a time when the Baghdad government was spreading extensive propaganda on the resistance power of its army...," said the statement quoted by IRNA. It claimed massive victories in the seven-week offensive and called on supporters of Baghdad to "come to their senses" and discontinue support for what it called the tottering regime in Iraq. Iran said its forces had "liberated" 155 square kilometers of enemy-occupied territory during the 1987 offensive and taken over islands, townships, rivers and part of a road leading into Basra. The Iranian forces "are in full control of these areas," the statement said. It said 81 Iraqi brigades and battalions were totally destroyed, along with 700 tanks and 1,500 other vehicles. The victory list also included 80 warplanes downed, 250 anti- aircraft guns and 400 pieces of military hardware destroyed and the seizure of 220 tanks and armoured personnel carriers. Reuter &#3;
MERIDIAN BANCORP INC <MRDN> SETS REGULAR PAYOUT 19870226214034 Qtly div 25 cts vs 25 cts prior Pay April one Record March 15 Reuter &#3;
U.S. BANK DISCOUNT BORROWINGS 310 MLN DLRS 19870226214134 U.S. bank discount window borrowings less extended credits averaged 310 mln dlrs in the week to Wednesday February 25, the Federal Reserve said. The Fed said that overall borrowings in the week fell 131 mln dlrs to 614 mln dlrs, with extended credits up 10 mln dlrs at 304 mln dlrs. The week was the second half of a two-week statement period. Net borrowings in the prior week averaged 451 mln dlrs. Commenting on the two-week statement period ended February 25, the Fed said that banks had average net free reserves of 644 mln dlrs a day, down from 1.34 billion two weeks earlier. A Federal Reserve spokesman told a press briefing that there were no large single day net misses in the Fed's reserve projections in the week to Wednesday. He said that natural float had been "acting a bit strangely" for this time of year, noting that there had been poor weather during the latest week. The spokesman said that natural float ranged from under 500 mln dlrs on Friday, for which he could give no reason, to nearly one billion dlrs on both Thursday and Wednesday. The Fed spokeman could give no reason for Thursday's high float, but he said that about 750 mln dlrs of Wednesday's float figure was due to holdover and transportation float at two widely separated Fed districts. For the week as a whole, he said that float related as of adjustments were "small," adding that they fell to a negative 750 mln dlrs on Tuesday due to a number of corrections for unrelated cash letter errors in six districts around the country. The spokesman said that on both Tuesday and Wednesday, two different clearing banks had system problems and the securities and Federal funds wires had to be held open until about 2000 or 2100 EST on both days. However, he said that both problems were cleared up during both afternoons and there was no evidence of any reserve impact. During the week ended Wednesday, 45 pct of net discount window borrowings were made by the smallest banks, with 30 pct by the 14 large money center banks and 25 pct by large regional institutions. On Wednesday, 55 pct of the borrowing was accounted for by the money center banks, with 30 pct by the large regionals and 15 pct by the smallest banks. The Fed spokesman said the banking system had excess reserves on Thursday, Monday and Tuesday and a deficit on Friday and Wedndsday. That produced a small daily average deficit for the week as a whole. For the two-week period, he said there were relatively high excess reserves on a daily avearge, almost all of which were at the smallest banks. Reuter &#3;
AMERICAN EXPRESS <AXP> SEEN IN POSSIBLE SPINNOFF 19870226214313 American Express Co remained silent on market rumors it would spinoff all or part of its Shearson Lehman Brothers Inc, but some analysts said the company may be considering such a move because it is unhappy with the market value of its stock. American Express stock got a lift from the rumor, as the market calculated a partially public Shearson may command a good market value, thereby boosting the total value of American Express. The rumor also was accompanied by talk the financial services firm would split its stock and boost its dividend. American Express closed on the New York Stock Exchange at 72-5/8, up 4-1/8 on heavy volume. American Express would not comment on the rumors or its stock activity. Analysts said comments by the company at an analysts' meeting Tuesday helped fuel the rumors as did an announcement yesterday of management changes. At the meeting, company officials said American Express stock is undervalued and does not fully reflect the performance of Shearson, according to analysts. Yesterday, Shearson said it was elevating its chief operating officer, Jeffery Lane, to the added position of president, which had been vacant. It also created four new positions for chairmen of its operating divisions. Analysts speculated a partial spinoff would make most sense, contrary to one variation on market rumors of a total spinoff. Some analysts, however, disagreed that any spinoff of Shearson would be good since it is a strong profit center for American Express, contributing about 20 pct of earnings last year. "I think it is highly unlikely that American Express is going to sell shearson," said Perrin Long of Lipper Analytical. He questioned what would be a better investment than "a very profitable securities firm." Several analysts said American Express is not in need of cash, which might be the only reason to sell a part of a strong asset. But others believe the company could very well of considered the option of spinning out part of Shearson, and one rumor suggests selling about 20 pct of it in the market. Larry Eckenfelder of Prudential-Bache Securities said he believes American Express could have considered a partial spinoff in the past. "Shearson being as profitable as it is would have fetched a big premium in the market place. Shearson's book value is in the 1.4 mln dlr range. Shearson in the market place would probably be worth three to 3.5 bilion dlrs in terms of market capitalization," said Eckenfelder. Some analysts said American Express could use capital since it plans to expand globally. "They have enormous internal growth plans that takes capital. You want your stock to reflect realistic valuations to enhance your ability to make all kinds of endeavors down the road," said E.F. Hutton Group analyst Michael Lewis. "They've outlined the fact that they're investing heavily in the future, which goes heavily into the international arena," said Lewis. "...That does not preclude acquisitions and divestitures along the way," he said. Lewis said if American Express reduced its exposure to the brokerage business by selling part of shearson, its stock might better reflect other assets, such as the travel related services business. "It could find its true water mark with a lesser exposure to brokerage. The value of the other components could command a higher multiple because they constitute a higher percentage of the total operating earnings of the company," he said. Lewis said Shearson contributed 316 mln in after-tax operating earnings, up from about 200 mln dlrs in 1985. Reuter &#3;
OHIO MATTRESS <OMT> MAY HAVE LOWER 1ST QTR NET 19870226201915 Ohio Mattress Co said its first quarter, ending February 28, profits may be below the 2.4 mln dlrs, or 15 cts a share, earned in the first quarter of fiscal 1986. The company said any decline would be due to expenses related to the acquisitions in the middle of the current quarter of seven licensees of Sealy Inc, as well as 82 pct of the outstanding capital stock of Sealy. Because of these acquisitions, it said, first quarter sales will be substantially higher than last year's 67.1 mln dlrs. Noting that it typically reports first quarter results in late march, said the report is likely to be issued in early April this year. It said the delay is due to administrative considerations, including conducting appraisals, in connection with the acquisitions. Reuter &#3;
U.S. M-1 MONEY SUPPLY ROSE 2.1 BILLION DLRS 19870226214435 U.S. M-1 money supply rose 2.1 billion dlrs to a seasonally adjusted 736.7 billion dlrs in the February 16 week, the Federal Reserve said. The previous week's M-1 level was revised to 734.6 billion dlrs from 734.2 billion dlrs, while the four-week moving average of M-1 rose to 735.0 billion dlrs from 733.5 billion. Economists polled by Reuters said that M-1 should be anywhere from down four billion dlrs to up 2.3 billion dlrs. The average forecast called for a 300 mln dlr M-1 rise. Reuter &#3;
GENERAL BINDING <GBND> IN MARKETING AGREEMENT 19870226214508 General Binding Corp said it reached a marketing agreement with Varitronic Systems Inc, a manufacturer and marketer of electronic lettering systems. Under terms of the agreement, General Binding will carry Varitronics' Merlin Express Presentation Lettering System, a portable, battery-operated lettering system which produces type on adhesive-backed tape. Reuter &#3;
LIBERTY ALL-STAR <USA> SETS INITIAL PAYOUT 19870226214544 Liberty All-Star Equity Fund said it declared an initial dividend of five cts per share, payable April two to shareholders of record March 20. It said the dividend includes a quarterly dividend of three cts a share and a special payout of two cts a share, which covers the period from November three, 1986, when the fund began operations, to December 31, 1986. The fund said its quarterly dividend rate may fluctuate in the future. Reuter &#3;
COCA COLA <KO> UNIT AND WORLD FILM IN VENTURE 19870226214745 Coca-Cola Co's Entertainment Business Sector Inc unit said it formed a joint venture with an affiliate of World Film Services to acquire, produce and distribute television programming around the world. World Film Services was formed by chairman John Heyman in 1963 to produce films. Reuter &#3;
FORD MOTOR CREDIT <F> TO REDEEM DEBENTURES 19870226214753 Ford Motor Co said its Ford Motor Credit Co on April One will redeem 4.0 mln dlrs of its 8.70 pct debentures due April 1, 1999. It said the debentures are redeemable at a price of 100 pct of the principal. Because April 1, 1987 is an interest payment date on the debentures, no accrued interest will be payable on the redemption date as part of the redemption proceeds. Debentures will be selected for redemption on a pro rata basis, Ford said. Reuter &#3;
STERLING SOFTWARE <SSW> NOTE HOLDERS OK BUY 19870226214802 Sterling Software Inc said it received consent of a majority of the holders of its eight pct convertible sernior subordinated debentures required to purchase shares of its common. The company said it may now buy its stock at its discretion depending on market conditions. Reuter &#3;
<SCHULT HOMES CORP> MAKES INITIAL STOCK OFFER 19870226214818 Schult Homes Corp announced an initial public offering of 833,334 units at five dlrs per unit, said Janney Montgomery Scott Inc and Woolcott and Co, managing underwriters of the offering. They said each unit consists of one common share and one warrant to buy one-half share of common. The warrant will entitle holders to buy one-half common share at 5.50 dlrs per full share from March one, 1988, to September one, 1989, and thereafter at 6.50 dlrs per full share until March 1991, they said. Reuter &#3;
FLUOR <FLR> UNIT GETS CONSTRUCTION CONTRACT 19870226214826 Fluor Corp said its Fluor Daniel unit received a contract from Union Carbide Corp <UK> covering design, procurement and construction of a 108 megawatt combined cycle cogeneration facility in Seadrift, Texas. The value of the contract was not disclosed. Reuter &#3;
SUFFIELD FINANCIAL CORP <SFCP> SELLS STOCK 19870226214835 Suffield Financial Corp said Jon Googel and Benjamin Sisti of Colonial Realty, West Hartford, Conn., purchased 175,900 shares of its stock for 3,416,624. The company said the purchase equals 5.2 pct of its outstanding shares. Reuter &#3;

View File

@ -37,7 +37,6 @@ import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.StopAnalyzer;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@ -50,7 +49,6 @@ import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.index.TermDocs;
@ -306,34 +304,47 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
Document doc = createDocument(text);
if (verbose) System.out.println("\n*********** FILE=" + file);
boolean measureIndexing = false; // toggle this to measure query performance
MemoryIndex memind = null;
IndexSearcher memsearcher = null;
if (useMemIndex && !measureIndexing) {
memind = createMemoryIndex(doc);
memsearcher = memind.createSearcher();
}
if (first) {
IndexSearcher s = memind.createSearcher();
TermDocs td = s.getIndexReader().termDocs(null);
assertTrue(td.next());
assertEquals(0, td.doc());
assertEquals(1, td.freq());
td.close();
s.close();
first = false;
}
RAMDirectory ramind = null;
IndexSearcher ramsearcher = null;
if (useRAMIndex && !measureIndexing) {
ramind = createRAMIndex(doc);
ramsearcher = new IndexSearcher(ramind);
}
for (int q=0; q < queries.length; q++) {
try {
Query query = parseQuery(queries[q]);
boolean measureIndexing = false; // toggle this to measure query performance
MemoryIndex memind = null;
if (useMemIndex && !measureIndexing) memind = createMemoryIndex(doc);
if (first) {
IndexSearcher s = memind.createSearcher();
TermDocs td = s.getIndexReader().termDocs(null);
assertTrue(td.next());
assertEquals(0, td.doc());
assertEquals(1, td.freq());
td.close();
s.close();
first = false;
}
RAMDirectory ramind = null;
if (useRAMIndex && !measureIndexing) ramind = createRAMIndex(doc);
for (int run=0; run < runs; run++) {
float score1 = 0.0f; float score2 = 0.0f;
if (useMemIndex && measureIndexing) memind = createMemoryIndex(doc);
if (useMemIndex) score1 = query(memind, query);
if (useRAMIndex && measureIndexing) ramind = createRAMIndex(doc);
if (useRAMIndex) score2 = query(ramind, query);
if (useMemIndex && measureIndexing) {
memind = createMemoryIndex(doc);
memsearcher = memind.createSearcher();
}
if (useMemIndex) score1 = query(memsearcher, query);
if (useRAMIndex && measureIndexing) {
ramind = createRAMIndex(doc);
ramsearcher = new IndexSearcher(ramind);
}
if (useRAMIndex) score2 = query(ramsearcher, query);
if (useMemIndex && useRAMIndex) {
if (verbose) System.out.println("diff="+ (score1-score2) + ", query=" + queries[q] + ", s1=" + score1 + ", s2=" + score2);
if (score1 != score2 || score1 < 0.0f || score2 < 0.0f || score1 > 1.0f || score2 > 1.0f) {
@ -418,17 +429,12 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
}
}
}
private float query(Object index, Query query) {
// System.out.println("MB=" + (getMemorySize(index) / (1024.0f * 1024.0f)));
Searcher searcher = null;
try {
if (index instanceof Directory)
searcher = new IndexSearcher((Directory)index, true);
else
searcher = ((MemoryIndex) index).createSearcher();
final float[] scores = new float[1]; // inits to 0.0f (no match)
final float[] scores = new float[1]; // inits to 0.0f (no match)
private float query(IndexSearcher searcher, Query query) {
// System.out.println("MB=" + (getMemorySize(index) / (1024.0f * 1024.0f)));
try {
searcher.search(query, new Collector() {
private Scorer scorer;
@ -456,12 +462,6 @@ public class MemoryIndexTest extends BaseTokenStreamTestCase {
return score;
} catch (IOException e) { // should never happen (RAMDirectory)
throw new RuntimeException(e);
} finally {
try {
if (searcher != null) searcher.close();
} catch (IOException e) { // should never happen (RAMDirectory)
throw new RuntimeException(e);
}
}
}
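
The MemoryIndexTest change hoists index and searcher construction out of the per-query loop: memind/memsearcher and ramind/ramsearcher are now built once per document (unless measureIndexing is toggled on), and the query() helper takes a ready IndexSearcher instead of building and closing one on every call. A minimal sketch of the reuse pattern, assuming the Lucene 3.0-era contrib MemoryIndex API (the SearcherReuse class name is illustrative):

import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;

public class SearcherReuse {
  public static void main(String[] args) throws Exception {
    MemoryIndex index = new MemoryIndex();
    index.addField("content", "the quick brown fox", new SimpleAnalyzer());
    IndexSearcher searcher = index.createSearcher();  // built once, up front
    String[] terms = { "quick", "fox", "dog" };
    for (String t : terms) {                          // reused for every query
      int hits = searcher.search(new TermQuery(new Term("content", t)), 1).totalHits;
      System.out.println(t + " -> " + hits + " hit(s)");
    }
    searcher.close();                                 // closed once, afterwards
  }
}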

View File

@ -108,8 +108,8 @@ public class TestSnapshotDeletionPolicy extends LuceneTestCase
}
private void runTest(Directory dir) throws Exception {
// Run for ~7 seconds
final long stopTime = System.currentTimeMillis() + 7000;
// Run for ~1 second
final long stopTime = System.currentTimeMillis() + 1000;
SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);

View File

@ -49,7 +49,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
private static abstract class TimedThread extends Thread {
boolean failed;
int count;
private static int RUN_TIME_SEC = 3;
private static float RUN_TIME_SEC = 0.5f;
private TimedThread[] allThreads;
abstract public void doWork() throws Throwable;
@ -60,7 +60,7 @@ public class TestAtomicUpdate extends LuceneTestCase {
@Override
public void run() {
final long stopTime = System.currentTimeMillis() + 1000*RUN_TIME_SEC;
final long stopTime = System.currentTimeMillis() + (long) (1000*RUN_TIME_SEC);
count = 0;
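
RUN_TIME_SEC changes from int 3 to float 0.5f here (the same change appears in TestTransactions further below), which is why the stopTime expression gains an explicit (long) cast: long + float promotes the whole expression to float, and Java will not implicitly narrow that back to long. A self-contained demonstration (class name illustrative):

public class FloatRunTime {
  public static void main(String[] args) {
    final float RUN_TIME_SEC = 0.5f;
    // long stop = System.currentTimeMillis() + 1000 * RUN_TIME_SEC;       // rejected: lossy float -> long
    long stop = System.currentTimeMillis() + (long) (1000 * RUN_TIME_SEC); // explicit truncating cast
    System.out.println("stop at " + stop);
  }
}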

View File

@ -699,7 +699,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
public void testThreadSafety() throws Exception {
final Directory dir = new MockRAMDirectory();
final int n = 150;
final int n = 30;
IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
for (int i = 0; i < n; i++) {
@ -741,7 +741,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
final Set readersToClose = Collections.synchronizedSet(new HashSet());
for (int i = 0; i < n; i++) {
if (i % 10 == 0) {
if (i % 2 == 0) {
IndexReader refreshed = reader.reopen();
if (refreshed != reader) {
readersToClose.add(reader);
@ -754,7 +754,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
ReaderThreadTask task;
if (i < 20 ||( i >=50 && i < 70) || i > 90) {
if (i < 4 ||( i >=10 && i < 14) || i > 18) {
task = new ReaderThreadTask() {
@Override
@ -825,7 +825,7 @@ public class TestIndexReaderReopen extends LuceneTestCase {
}
synchronized(this) {
wait(15000);
wait(1000);
}
for (int i = 0; i < n; i++) {
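
The constants in this test scale together: n drops from 150 to 30, the reopen stride from every 10th to every 2nd iteration, and the branch thresholds from 20/50/70/90 to 4/10/14/18, everything divided by 5, so the mix of task types per run is unchanged. A small sketch of that invariant (names hypothetical, not from the commit):

public class TaskMix {
  // u is 5 under the old n=150 and 1 under the new n=30, so the
  // proportions of the two task kinds are identical in both configurations.
  static boolean readerTask(int i, int n) {
    int u = n / 30;
    return i < 4 * u || (i >= 10 * u && i < 14 * u) || i > 18 * u;
  }
  public static void main(String[] args) {
    System.out.println(readerTask(15, 150) == readerTask(3, 30)); // true
  }
}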

View File

@ -435,8 +435,8 @@ public class TestIndexWriter extends LuceneTestCase {
dir.close();
// Try again with 2000 more bytes of free space:
diskFree += 2000;
// Try again with 5000 more bytes of free space:
diskFree += 5000;
}
}
@ -2202,7 +2202,7 @@ public class TestIndexWriter extends LuceneTestCase {
int idUpto = 0;
int fullCount = 0;
final long stopTime = System.currentTimeMillis() + 500;
final long stopTime = System.currentTimeMillis() + 200;
while(System.currentTimeMillis() < stopTime) {
try {
@ -2249,7 +2249,7 @@ public class TestIndexWriter extends LuceneTestCase {
public void testCloseWithThreads() throws Exception {
int NUM_THREADS = 3;
for(int iter=0;iter<20;iter++) {
for(int iter=0;iter<7;iter++) {
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
@ -2432,7 +2432,7 @@ public class TestIndexWriter extends LuceneTestCase {
int NUM_THREADS = 3;
for(int iter=0;iter<5;iter++) {
for(int iter=0;iter<2;iter++) {
MockRAMDirectory dir = new MockRAMDirectory();
IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.LIMITED);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();

View File

@ -63,7 +63,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
final Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.add(idField);
final long stopTime = System.currentTimeMillis() + 3000;
final long stopTime = System.currentTimeMillis() + 500;
while(System.currentTimeMillis() < stopTime) {
doFail.set(this);

View File

@ -271,7 +271,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
}
public void testAddIndexesAndDoDeletesThreads() throws Throwable {
final int numIter = 5;
final int numIter = 2;
int numDirs = 3;
Directory mainDir = new MockRAMDirectory();
@ -720,7 +720,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
IndexReader r = writer.getReader();
final int NUM_THREAD = 5;
final float SECONDS = 3;
final float SECONDS = 0.5f;
final long endTime = (long) (System.currentTimeMillis() + 1000.*SECONDS);
final List excs = Collections.synchronizedList(new ArrayList());
@ -784,7 +784,7 @@ public class TestIndexWriterReader extends LuceneTestCase {
IndexReader r = writer.getReader();
final int NUM_THREAD = 5;
final float SECONDS = 3;
final float SECONDS = 0.5f;
final long endTime = (long) (System.currentTimeMillis() + 1000.*SECONDS);
final List excs = Collections.synchronizedList(new ArrayList());

View File

@ -46,7 +46,7 @@ public class TestNRTReaderWithThreads extends LuceneTestCase {
indexThreads[x].start();
}
long startTime = System.currentTimeMillis();
long duration = 5*1000;
long duration = 1000;
while ((System.currentTimeMillis() - startTime) < duration) {
Thread.sleep(100);
}

View File

@ -33,7 +33,7 @@ public class TestStressIndexing extends LuceneTestCase {
private static abstract class TimedThread extends Thread {
boolean failed;
int count;
private static int RUN_TIME_SEC = 6;
private static int RUN_TIME_SEC = 1;
private TimedThread[] allThreads;
abstract public void doWork() throws Throwable;

View File

@ -58,7 +58,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
Directory dir = new MockRAMDirectory();
// TODO: verify equals using IW.getReader
DocsAndWriter dw = indexRandomIWReader(10, 100, 100, dir);
DocsAndWriter dw = indexRandomIWReader(10, 10, 100, dir);
IndexReader r = dw.writer.getReader();
dw.writer.commit();
verifyEquals(r, dir, "id");
@ -73,7 +73,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
// dir1 = FSDirectory.open("foofoofoo");
Directory dir2 = new MockRAMDirectory();
// mergeFactor=2; maxBufferedDocs=2; Map docs = indexRandom(1, 3, 2, dir1);
Map docs = indexRandom(10, 100, 100, dir1);
Map docs = indexRandom(10, 10, 100, dir1);
indexSerial(docs, dir2);
// verifying verify
@ -86,7 +86,7 @@ public class TestStressIndexing2 extends LuceneTestCase {
public void testMultiConfig() throws Throwable {
// test lots of smaller different params together
r = newRandom();
for (int i=0; i<100; i++) { // increase iterations for better testing
for (int i=0; i<20; i++) { // increase iterations for better testing
sameFieldOrder=r.nextBoolean();
mergeFactor=r.nextInt(3)+2;
maxBufferedDocs=r.nextInt(3)+2;

View File

@ -39,7 +39,7 @@ public class TestTransactions extends LuceneTestCase
private static abstract class TimedThread extends Thread {
boolean failed;
private static int RUN_TIME_SEC = 6;
private static float RUN_TIME_SEC = 0.5f;
private TimedThread[] allThreads;
abstract public void doWork() throws Throwable;
@ -50,7 +50,7 @@ public class TestTransactions extends LuceneTestCase
@Override
public void run() {
final long stopTime = System.currentTimeMillis() + 1000*RUN_TIME_SEC;
final long stopTime = System.currentTimeMillis() + (long) (1000*RUN_TIME_SEC);
try {
while(System.currentTimeMillis() < stopTime && !anyErrors())

View File

@ -158,9 +158,6 @@ public class CheckHits {
Searcher searcher,
int[] results)
throws IOException {
if (searcher instanceof IndexSearcher) {
QueryUtils.check(query,searcher);
}
ScoreDoc[] hits = searcher.search(query, null, 1000).scoreDocs;

View File

@ -208,7 +208,7 @@ public class TestBoolean2 extends LuceneTestCase {
try {
// increase number of iterations for more complete testing
for (int i=0; i<1000; i++) {
for (int i=0; i<50; i++) {
int level = rnd.nextInt(3);
q1 = randBoolQuery(new Random(rnd.nextLong()), rnd.nextBoolean(), level, field, vals, null);

View File

@ -316,7 +316,7 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
// increase number of iterations for more complete testing
for (int i=0; i<1000; i++) {
for (int i=0; i<50; i++) {
int lev = rnd.nextInt(maxLev);
final long seed = rnd.nextLong();
BooleanQuery q1 = TestBoolean2.randBoolQuery(new Random(seed), true, lev, field, vals, null);
@ -331,10 +331,10 @@ public class TestBooleanMinShouldMatch extends LuceneTestCase {
// will not normalize scores.
TopDocs top1 = s.search(q1,null,100);
TopDocs top2 = s.search(q2,null,100);
QueryUtils.check(q1,s);
QueryUtils.check(q2,s);
if (i < 100) {
QueryUtils.check(q1,s);
QueryUtils.check(q2,s);
}
// The constrained query
// should be a superset to the unconstrained query.
if (top2.totalHits > top1.totalHits) {

View File

@ -229,7 +229,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
final Random rnd=newRandom();
String field="field"+precisionStep;
int termCountT=0,termCountC=0;
for (int i=0; i<50; i++) {
for (int i=0; i<10; i++) {
int lower=(int)(rnd.nextDouble()*noDocs*distance)+startOffset;
int upper=(int)(rnd.nextDouble()*noDocs*distance)+startOffset;
if (lower>upper) {
@ -272,8 +272,8 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
assertEquals("Total number of terms should be equal for unlimited precStep", termCountT, termCountC);
} else {
System.out.println("Average number of terms during random search on '" + field + "':");
System.out.println(" Trie query: " + (((double)termCountT)/(50*4)));
System.out.println(" Classical query: " + (((double)termCountC)/(50*4)));
System.out.println(" Trie query: " + (((double)termCountT)/(10*4)));
System.out.println(" Classical query: " + (((double)termCountC)/(10*4)));
}
}
@ -296,8 +296,8 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
private void testRangeSplit(int precisionStep) throws Exception {
final Random rnd=newRandom();
String field="ascfield"+precisionStep;
// 50 random tests
for (int i=0; i<50; i++) {
// 10 random tests
for (int i=0; i<10; i++) {
int lower=(int)(rnd.nextDouble()*noDocs - noDocs/2);
int upper=(int)(rnd.nextDouble()*noDocs - noDocs/2);
if (lower>upper) {

View File

@ -244,7 +244,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
final Random rnd=newRandom();
String field="field"+precisionStep;
int termCountT=0,termCountC=0;
for (int i=0; i<50; i++) {
for (int i=0; i<10; i++) {
long lower=(long)(rnd.nextDouble()*noDocs*distance)+startOffset;
long upper=(long)(rnd.nextDouble()*noDocs*distance)+startOffset;
if (lower>upper) {
@ -287,8 +287,8 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
assertEquals("Total number of terms should be equal for unlimited precStep", termCountT, termCountC);
} else {
System.out.println("Average number of terms during random search on '" + field + "':");
System.out.println(" Trie query: " + (((double)termCountT)/(50*4)));
System.out.println(" Classical query: " + (((double)termCountC)/(50*4)));
System.out.println(" Trie query: " + (((double)termCountT)/(10*4)));
System.out.println(" Classical query: " + (((double)termCountC)/(10*4)));
}
}
@ -315,8 +315,8 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
private void testRangeSplit(int precisionStep) throws Exception {
final Random rnd=newRandom();
String field="ascfield"+precisionStep;
// 50 random tests
for (int i=0; i<50; i++) {
// 10 random tests
for (int i=0; i<10; i++) {
long lower=(long)(rnd.nextDouble()*noDocs - noDocs/2);
long upper=(long)(rnd.nextDouble()*noDocs - noDocs/2);
if (lower>upper) {

View File

@ -40,7 +40,7 @@ import org.apache.lucene.util.ThreadInterruptedException;
* and (3) a sanity test with multiple searching threads.
*/
public class TestTimeLimitingCollector extends LuceneTestCase {
private static final int SLOW_DOWN = 47;
private static final int SLOW_DOWN = 3;
private static final long TIME_ALLOWED = 17 * SLOW_DOWN; // so searches can find about 17 docs.
// max time allowed is relaxed for multithreading tests.

View File

@ -206,7 +206,13 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
assertEquals("queries should have same #hits",h1.size(),h3CustomMul.size());
assertEquals("queries should have same #hits",h1.size(),h4CustomAdd.size());
assertEquals("queries should have same #hits",h1.size(),h5CustomMulAdd.size());
QueryUtils.check(q1,s);
QueryUtils.check(q2,s);
QueryUtils.check(q3,s);
QueryUtils.check(q4,s);
QueryUtils.check(q5,s);
// verify scores ratios
for (Iterator it = h1.keySet().iterator(); it.hasNext();) {
Integer x = (Integer) it.next();
@ -240,7 +246,6 @@ public class TestCustomScoreQuery extends FunctionTestSetup {
}
private void logResult(String msg, IndexSearcher s, Query q, int doc, float score1) throws IOException {
QueryUtils.check(q,s);
log(msg+" "+score1);
log("Explain by: "+q);
log(s.explain(q,doc));
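
Taken together with the CheckHits hunk above, this moves QueryUtils.check out of the shared hit-checking path, where every scoring test paid for it, and into the one test that wants the deep consistency checks, run once per query. A hedged sketch of the gating idea (the GatedCheck helper is hypothetical):

import java.io.IOException;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.ScoreDoc;

public class GatedCheck {
  static ScoreDoc[] searchMaybeCheck(IndexSearcher s, Query q, boolean deep)
      throws IOException {
    if (deep) {
      QueryUtils.check(q, s);           // the costly skipTo()/explain() sanity checks
    }
    return s.search(q, null, 1000).scoreDocs;
  }
}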

View File

@ -58,7 +58,7 @@ public class TestBufferedIndexInput extends LuceneTestCase {
}
}
private static final long TEST_FILE_LENGTH = 1024*1024;
private static final long TEST_FILE_LENGTH = 100*1024;
// Call readByte() repeatedly, past the buffer boundary, and see that it
// is working as expected.
@ -94,15 +94,10 @@ public class TestBufferedIndexInput extends LuceneTestCase {
// run test with chunk size of 10 bytes
runReadBytesAndClose(new SimpleFSIndexInput(tmpInputFile,
inputBufferSize, 10), inputBufferSize, r);
// run test with chunk size of 100 MB - default
runReadBytesAndClose(new SimpleFSIndexInput(tmpInputFile,
inputBufferSize, FSDirectory.DEFAULT_READ_CHUNK_SIZE), inputBufferSize, r);
// run test with chunk size of 10 bytes
runReadBytesAndClose(new NIOFSIndexInput(tmpInputFile,
inputBufferSize, 10), inputBufferSize, r);
// run test with chunk size of 100 MB - default
runReadBytesAndClose(new NIOFSIndexInput(tmpInputFile,
inputBufferSize, FSDirectory.DEFAULT_READ_CHUNK_SIZE), inputBufferSize, r);
}
private void runReadBytesAndClose(IndexInput input, int bufferSize, Random r)
@ -129,7 +124,7 @@ public class TestBufferedIndexInput extends LuceneTestCase {
}
}
// wildly fluctuating size:
for (long i = 0; i < 1000; i++) {
for (long i = 0; i < 100; i++) {
final int size = r.nextInt(10000);
checkReadBytes(input, 1+size, pos);
pos += 1+size;
@ -172,10 +167,10 @@ public class TestBufferedIndexInput extends LuceneTestCase {
}
}
// This tests that attempts to readBytes() past an EOF will fail, while
// reads up to the EOF will succeed. The EOF is determined by the
// BufferedIndexInput's arbitrary length() value.
public void testEOF() throws Exception {
// This tests that attempts to readBytes() past an EOF will fail, while
// reads up to the EOF will succeed. The EOF is determined by the
// BufferedIndexInput's arbitrary length() value.
public void testEOF() throws Exception {
MyBufferedIndexInput input = new MyBufferedIndexInput(1024);
// see that we can read all the bytes at one go:
checkReadBytes(input, (int)input.length(), 0);
@ -223,25 +218,25 @@ public class TestBufferedIndexInput extends LuceneTestCase {
// an infinite file
this(Long.MAX_VALUE);
}
@Override
protected void readInternal(byte[] b, int offset, int length) throws IOException {
for(int i=offset; i<offset+length; i++)
b[i] = byten(pos++);
}
@Override
protected void readInternal(byte[] b, int offset, int length) throws IOException {
for(int i=offset; i<offset+length; i++)
b[i] = byten(pos++);
}
@Override
protected void seekInternal(long pos) throws IOException {
this.pos = pos;
}
@Override
protected void seekInternal(long pos) throws IOException {
this.pos = pos;
}
@Override
public void close() throws IOException {
}
@Override
public void close() throws IOException {
}
@Override
public long length() {
return len;
}
@Override
public long length() {
return len;
}
}
public void testSetBufferSize() throws IOException {

View File

@ -24,7 +24,6 @@ import java.util.Random;
import junit.framework.TestCase;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldCache.CacheEntry;
@ -203,6 +202,7 @@ public abstract class LuceneTestCase extends TestCase {
@Override
public void runBare() throws Throwable {
//long t0 = System.currentTimeMillis();
try {
seed = null;
super.runBare();
@ -212,6 +212,8 @@ public abstract class LuceneTestCase extends TestCase {
}
throw e;
}
//long t = System.currentTimeMillis() - t0;
//System.out.println(t + " msec for " + getName());
}
// recorded seed
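
The commented-out lines added to runBare are a timing hook: uncommented, every test prints its wall-clock duration, which is presumably how the slow tests targeted by this commit were found. A variant as a subclass, using finally so failing tests report too (class name illustrative; the commented-out version only prints on the success path):

import org.apache.lucene.util.LuceneTestCase;

public abstract class TimedTestCase extends LuceneTestCase {
  @Override
  public void runBare() throws Throwable {
    long t0 = System.currentTimeMillis();
    try {
      super.runBare();
    } finally {
      // prints for failures too, unlike the success-path-only version above
      System.out.println((System.currentTimeMillis() - t0) + " msec for " + getName());
    }
  }
}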

View File

@ -22,7 +22,7 @@ import java.nio.CharBuffer;
import java.nio.ByteBuffer;
public class TestIndexableBinaryStringTools extends LuceneTestCase {
private static final int NUM_RANDOM_TESTS = 20000;
private static final int NUM_RANDOM_TESTS = 2000;
private static final int MAX_RANDOM_BINARY_LENGTH = 300;
public void testSingleBinaryRoundTrip() {