LUCENE-1617: add -Dtestpackage=XXX and -Dtestpackageroot=lucene to run a package's tests

git-svn-id: https://svn.apache.org/repos/asf/lucene/java/trunk@769522 13f79535-47bb-0310-9956-ffa450edef68
Michael McCandless 2009-04-28 20:23:52 +00:00
parent 9242c2b6e1
commit f3189d8063
7 changed files with 16 additions and 695 deletions
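Usage sketch (the target and property names follow the commit message and the junit batchtest patterns below; the package and class names are only illustrative):

    ant test -Dtestcase=TestDemo          # run a single test class, as before
    ant test -Dtestpackage=search         # run all Test*.java / *Test.java classes under any "search" package
    ant test -Dtestpackageroot=lucene     # run only the tests directly in a "lucene" package directory

When none of these properties is set, the new runall property is set and the whole suite runs as before.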

@@ -364,8 +364,15 @@
    <attribute name="dataDir"/>
    <attribute name="tempDir"/>
    <element name="contrib-settings" optional="yes"/>
    <sequential>
    <sequential>
      <condition property="runall">
        <not><or>
          <isset property="testcase" />
          <isset property="testpackage" />
          <isset property="testpackageroot" />
        </or></not>
      </condition>
      <mkdir dir="@{junit.output.dir}"/>
      <junit printsummary="off" haltonfailure="no" maxmemory="512M"
        errorProperty="tests.failed" failureProperty="tests.failed">
@@ -386,9 +393,15 @@
        <formatter type="xml"/>
        <formatter type="brief" usefile="false"/>
        <batchtest fork="yes" todir="@{junit.output.dir}" unless="testcase">
        <batchtest fork="yes" todir="@{junit.output.dir}" if="runall">
          <fileset dir="@{dataDir}" includes="${junit.includes}" excludes="${junit.excludes}"/>
        </batchtest>
        <batchtest fork="yes" todir="@{junit.output.dir}" if="testpackage">
          <fileset dir="@{dataDir}" includes="**/${testpackage}/**/Test*.java,**/${testpackage}/**/*Test.java" excludes="${junit.excludes}"/>
        </batchtest>
        <batchtest fork="yes" todir="@{junit.output.dir}" if="testpackageroot">
          <fileset dir="@{dataDir}" includes="**/${testpackageroot}/Test*.java,**/${testpackageroot}/*Test.java" excludes="${junit.excludes}"/>
        </batchtest>
        <batchtest fork="yes" todir="@{junit.output.dir}" if="testcase">
          <fileset dir="@{dataDir}" includes="**/${testcase}.java"/>
        </batchtest>
@@ -574,15 +587,3 @@
</project>
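For example, with -Dtestpackage=store the first new include pattern above expands to

    **/store/**/Test*.java,**/store/**/*Test.java

so any Test*.java or *Test.java file under a package directory named "store" is run, while -Dtestpackageroot=lucene matches only the Test*.java / *Test.java files that sit directly in a directory named "lucene" (the top-level org/apache/lucene tests), not those in its sub-packages.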

@@ -1,95 +0,0 @@
package org.apache.lucene;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringReader;
import java.util.Date;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;

class AnalysisTest {
  static File tmpFile;

  public static void main(String[] args) {
    try {
      test("This is a test", true);
      tmpFile = File.createTempFile("words", ".txt");
      test(tmpFile, false);
    } catch (Exception e) {
      System.out.println(" caught a " + e.getClass() +
                         "\n with message: " + e.getMessage());
    }
    tmpFile.deleteOnExit();
  }

  static void test(File file, boolean verbose)
    throws Exception {
    long bytes = file.length();
    System.out.println(" Reading test file containing " + bytes + " bytes.");
    FileInputStream is = new FileInputStream(file);
    BufferedReader ir = new BufferedReader(new InputStreamReader(is));
    test(ir, verbose, bytes);
    ir.close();
  }

  static void test(String text, boolean verbose) throws Exception {
    System.out.println(" Tokenizing string: " + text);
    test(new StringReader(text), verbose, text.length());
  }

  static void test(Reader reader, boolean verbose, long bytes)
    throws Exception {
    Analyzer analyzer = new SimpleAnalyzer();
    TokenStream stream = analyzer.tokenStream(null, reader);

    Date start = new Date();

    int count = 0;
    stream.reset();
    TermAttribute termAtt = (TermAttribute) stream.getAttribute(TermAttribute.class);
    OffsetAttribute offsetAtt = (OffsetAttribute) stream.getAttribute(OffsetAttribute.class);
    while (stream.incrementToken()) {
      if (verbose) {
        System.out.println("Text=" + termAtt.term()
                           + " start=" + offsetAtt.startOffset()
                           + " end=" + offsetAtt.endOffset());
      }
      count++;
    }

    Date end = new Date();

    long time = end.getTime() - start.getTime();
    System.out.println(time + " milliseconds to extract " + count + " tokens");
    System.out.println((time*1000.0)/count + " microseconds/token");
    System.out.println((bytes * 1000.0 * 60.0 * 60.0)/(time * 1000000.0)
                       + " megabytes/hour");
  }
}

@@ -1,77 +0,0 @@
package org.apache.lucene;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.demo.FileDocument;

import java.io.File;
import java.util.Date;

class IndexTest {
  public static void main(String[] args) {
    try {
      Date start = new Date();
      IndexWriter writer = new IndexWriter(File.createTempFile("luceneTest", "idx"),
                                           new SimpleAnalyzer(), true,
                                           IndexWriter.MaxFieldLength.LIMITED);

      writer.setMergeFactor(20);

      indexDocs(writer, new File("/tmp"));

      writer.optimize();
      writer.close();

      Date end = new Date();

      System.out.print(end.getTime() - start.getTime());
      System.out.println(" total milliseconds");

      Runtime runtime = Runtime.getRuntime();

      System.out.print(runtime.freeMemory());
      System.out.println(" free memory before gc");
      System.out.print(runtime.totalMemory());
      System.out.println(" total memory before gc");

      runtime.gc();

      System.out.print(runtime.freeMemory());
      System.out.println(" free memory after gc");
      System.out.print(runtime.totalMemory());
      System.out.println(" total memory after gc");

    } catch (Exception e) {
      System.out.println(" caught a " + e.getClass() +
                         "\n with message: " + e.getMessage());
    }
  }

  public static void indexDocs(IndexWriter writer, File file)
    throws Exception {
    if (file.isDirectory()) {
      String[] files = file.list();
      for (int i = 0; i < files.length; i++)
        indexDocs(writer, new File(file, files[i]));
    } else {
      System.out.println("adding " + file);
      writer.addDocument(FileDocument.Document(file));
    }
  }
}

@@ -1,97 +0,0 @@
package org.apache.lucene;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.GregorianCalendar;

import org.apache.lucene.store.*;
import org.apache.lucene.document.*;
import org.apache.lucene.analysis.*;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.queryParser.*;

class SearchTest {
  public static void main(String[] args) {
    try {
      Directory directory = new RAMDirectory();
      Analyzer analyzer = new SimpleAnalyzer();
      IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);

      String[] docs = {
        "a b c d e",
        "a b c d e a b c d e",
        "a b c d e f g h i j",
        "a c e",
        "e c a",
        "a c e a c e",
        "a c e a b c"
      };
      for (int j = 0; j < docs.length; j++) {
        Document d = new Document();
        d.add(new Field("contents", docs[j], Field.Store.YES, Field.Index.ANALYZED));
        writer.addDocument(d);
      }
      writer.close();

      Searcher searcher = new IndexSearcher(directory);

      String[] queries = {
        // "a b",
        // "\"a b\"",
        // "\"a b c\"",
        // "a c",
        // "\"a c\"",
        "\"a c e\"",
      };

      QueryParser parser = new QueryParser("contents", analyzer);
      parser.setPhraseSlop(4);
      for (int j = 0; j < queries.length; j++) {
        Query query = parser.parse(queries[j]);
        System.out.println("Query: " + query.toString("contents"));

        //DateFilter filter =
        //  new DateFilter("modified", Time(1997,0,1), Time(1998,0,1));
        //DateFilter filter = DateFilter.Before("modified", Time(1997,00,01));
        //System.out.println(filter);

        ScoreDoc[] hits = searcher.search(query, null, docs.length).scoreDocs;
        System.out.println(hits.length + " total results");
        for (int i = 0 ; i < hits.length && i < 10; i++) {
          Document d = searcher.doc(hits[i].doc);
          System.out.println(i + " " + hits[i].score
                             // + " " + DateField.stringToDate(d.get("modified"))
                             + " " + d.get("contents"));
        }
      }
      searcher.close();
    } catch (Exception e) {
      System.out.println(" caught a " + e.getClass() +
                         "\n with message: " + e.getMessage());
    }
  }

  static long Time(int year, int month, int day) {
    GregorianCalendar calendar = new GregorianCalendar();
    calendar.set(year, month, day);
    return calendar.getTime().getTime();
  }
}

@@ -1,103 +0,0 @@
package org.apache.lucene;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Searcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

class SearchTestForDuplicates {

  static final String PRIORITY_FIELD = "priority";
  static final String ID_FIELD = "id";
  static final String HIGH_PRIORITY = "high";
  static final String MED_PRIORITY = "medium";
  static final String LOW_PRIORITY = "low";

  public static void main(String[] args) {
    try {
      Directory directory = new RAMDirectory();
      Analyzer analyzer = new SimpleAnalyzer();
      IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);

      final int MAX_DOCS = 225;

      for (int j = 0; j < MAX_DOCS; j++) {
        Document d = new Document();
        d.add(new Field(PRIORITY_FIELD, HIGH_PRIORITY, Field.Store.YES, Field.Index.ANALYZED));
        d.add(new Field(ID_FIELD, Integer.toString(j), Field.Store.YES, Field.Index.ANALYZED));
        writer.addDocument(d);
      }
      writer.close();

      // try a search without OR
      Searcher searcher = new IndexSearcher(directory);
      ScoreDoc[] hits = null;

      QueryParser parser = new QueryParser(PRIORITY_FIELD, analyzer);

      Query query = parser.parse(HIGH_PRIORITY);
      System.out.println("Query: " + query.toString(PRIORITY_FIELD));

      hits = searcher.search(query, null, 1000).scoreDocs;
      printHits(hits, searcher);

      searcher.close();

      // try a new search with OR
      searcher = new IndexSearcher(directory);
      hits = null;

      parser = new QueryParser(PRIORITY_FIELD, analyzer);

      query = parser.parse(HIGH_PRIORITY + " OR " + MED_PRIORITY);
      System.out.println("Query: " + query.toString(PRIORITY_FIELD));

      hits = searcher.search(query, null, 1000).scoreDocs;
      printHits(hits, searcher);

      searcher.close();

    } catch (Exception e) {
      System.out.println(" caught a " + e.getClass() +
                         "\n with message: " + e.getMessage());
    }
  }

  private static void printHits(ScoreDoc[] hits, Searcher searcher) throws IOException {
    System.out.println(hits.length + " total results\n");
    for (int i = 0 ; i < hits.length; i++) {
      if (i < 10 || (i > 94 && i < 105)) {
        Document d = searcher.doc(hits[i].doc);
        System.out.println(i + " " + d.get(ID_FIELD));
      }
    }
  }
}

@@ -1,144 +0,0 @@
package org.apache.lucene;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util._TestUtil;

import java.util.Date;
import java.util.Random;

class StoreTest {
  public static void main(String[] args) {
    try {
      test(1000, true, true);
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

  public static void test(int count, boolean ram, boolean buffered)
    throws Exception {
    Random gen = new Random(1251971);
    int i;

    Date veryStart = new Date();
    Date start = new Date();

    Directory store;
    if (ram)
      store = new RAMDirectory();
    else {
      String dirName = "test.store";
      _TestUtil.rmDir(dirName);
      store = FSDirectory.getDirectory(dirName);
    }

    final int LENGTH_MASK = 0xFFF;
    final byte[] buffer = new byte[LENGTH_MASK];

    for (i = 0; i < count; i++) {
      String name = i + ".dat";
      int length = gen.nextInt() & LENGTH_MASK;
      byte b = (byte)(gen.nextInt() & 0x7F);
      //System.out.println("filling " + name + " with " + length + " of " + b);

      IndexOutput file = store.createOutput(name);

      if (buffered) {
        for (int j = 0; j < length; j++)
          buffer[j] = b;
        file.writeBytes(buffer, length);
      } else {
        for (int j = 0; j < length; j++)
          file.writeByte(b);
      }

      file.close();
    }
    store.close();

    Date end = new Date();

    System.out.print(end.getTime() - start.getTime());
    System.out.println(" total milliseconds to create");

    gen = new Random(1251971);
    start = new Date();

    if (!ram)
      store = FSDirectory.getDirectory("test.store");

    for (i = 0; i < count; i++) {
      String name = i + ".dat";
      int length = gen.nextInt() & LENGTH_MASK;
      byte b = (byte)(gen.nextInt() & 0x7F);
      //System.out.println("reading " + name + " with " + length + " of " + b);

      IndexInput file = store.openInput(name);

      if (file.length() != length)
        throw new Exception("length incorrect");

      byte[] content = new byte[length];
      if (buffered) {
        file.readBytes(content, 0, length);
        // check the buffer
        for (int j = 0; j < length; j++)
          if (content[j] != b)
            throw new Exception("contents incorrect");
      } else {
        for (int j = 0; j < length; j++)
          if (file.readByte() != b)
            throw new Exception("contents incorrect");
      }

      file.close();
    }

    end = new Date();

    System.out.print(end.getTime() - start.getTime());
    System.out.println(" total milliseconds to read");

    gen = new Random(1251971);
    start = new Date();

    for (i = 0; i < count; i++) {
      String name = i + ".dat";
      //System.out.println("deleting " + name);
      store.deleteFile(name);
    }

    end = new Date();

    System.out.print(end.getTime() - start.getTime());
    System.out.println(" total milliseconds to delete");

    System.out.print(end.getTime() - veryStart.getTime());
    System.out.println(" total milliseconds");

    store.close();
  }
}

@@ -1,164 +0,0 @@
package org.apache.lucene;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.util.*;
import org.apache.lucene.store.*;
import org.apache.lucene.document.*;
import org.apache.lucene.analysis.*;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.queryParser.*;

import java.util.Random;
import java.io.File;

class ThreadSafetyTest {
  private static final Analyzer ANALYZER = new SimpleAnalyzer();
  private static final Random RANDOM = new Random();
  private static Searcher SEARCHER;

  private static int ITERATIONS = 1;

  private static int random(int i) {          // for JDK 1.1 compatibility
    int r = RANDOM.nextInt();
    if (r < 0) r = -r;
    return r % i;
  }

  private static class IndexerThread extends Thread {
    private final int reopenInterval = 30 + random(60);
    IndexWriter writer;

    public IndexerThread(IndexWriter writer) {
      this.writer = writer;
    }

    public void run() {
      try {
        boolean useCompoundFiles = false;

        for (int i = 0; i < 1024*ITERATIONS; i++) {
          Document d = new Document();
          int n = RANDOM.nextInt();
          d.add(new Field("id", Integer.toString(n), Field.Store.YES, Field.Index.NOT_ANALYZED));
          d.add(new Field("contents", English.intToEnglish(n), Field.Store.NO, Field.Index.ANALYZED));
          System.out.println("Adding " + n);

          // Switch between single and multiple file segments
          useCompoundFiles = Math.random() < 0.5;
          writer.setUseCompoundFile(useCompoundFiles);

          writer.addDocument(d);

          if (i%reopenInterval == 0) {
            writer.close();
            writer = new IndexWriter("index", ANALYZER, false, IndexWriter.MaxFieldLength.LIMITED);
          }
        }

        writer.close();

      } catch (Exception e) {
        System.out.println(e.toString());
        e.printStackTrace();
        System.exit(0);
      }
    }
  }

  private static class SearcherThread extends Thread {
    private IndexSearcher searcher;
    private final int reopenInterval = 10 + random(20);

    public SearcherThread(boolean useGlobal) throws java.io.IOException {
      if (!useGlobal)
        this.searcher = new IndexSearcher("index");
    }

    public void run() {
      try {
        for (int i = 0; i < 512*ITERATIONS; i++) {
          searchFor(RANDOM.nextInt(), (searcher==null)?SEARCHER:searcher);
          if (i%reopenInterval == 0) {
            if (searcher == null) {
              SEARCHER = new IndexSearcher("index");
            } else {
              searcher.close();
              searcher = new IndexSearcher("index");
            }
          }
        }
      } catch (Exception e) {
        System.out.println(e.toString());
        e.printStackTrace();
        System.exit(0);
      }
    }

    private void searchFor(int n, Searcher searcher)
      throws Exception {
      System.out.println("Searching for " + n);
      QueryParser parser = new QueryParser("contents", ANALYZER);
      ScoreDoc[] hits =
        searcher.search(parser.parse(English.intToEnglish(n)), null, 1000).scoreDocs;
      System.out.println("Search for " + n + ": total=" + hits.length);
      for (int j = 0; j < Math.min(3, hits.length); j++) {
        System.out.println("Hit for " + n + ": " + searcher.doc(hits[j].doc).get("id"));
      }
    }
  }

  public static void main(String[] args) throws Exception {
    boolean readOnly = false;
    boolean add = false;

    for (int i = 0; i < args.length; i++) {
      if ("-ro".equals(args[i]))
        readOnly = true;
      if ("-add".equals(args[i]))
        add = true;
    }

    File indexDir = new File("index");
    if (! indexDir.exists()) indexDir.mkdirs();

    IndexReader.unlock(FSDirectory.getDirectory(indexDir));

    if (!readOnly) {
      IndexWriter writer = new IndexWriter(indexDir, ANALYZER, !add, IndexWriter.MaxFieldLength.LIMITED);

      Thread indexerThread = new IndexerThread(writer);
      indexerThread.start();

      Thread.sleep(1000);
    }

    SearcherThread searcherThread1 = new SearcherThread(false);
    searcherThread1.start();

    SEARCHER = new IndexSearcher(indexDir.toString());

    SearcherThread searcherThread2 = new SearcherThread(true);
    searcherThread2.start();

    SearcherThread searcherThread3 = new SearcherThread(true);
    searcherThread3.start();
  }
}