LUCENE-1709: Parallelize Tests

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@928069 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2010-03-26 21:55:57 +00:00
parent 2450876809
commit 65127736f0
8 changed files with 433 additions and 89 deletions

View File

@@ -16,8 +16,9 @@
*
*/
package org.apache.solr;
package org.apache.lucene.util;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.text.NumberFormat;
@@ -25,6 +26,8 @@ import java.text.NumberFormat;
import junit.framework.AssertionFailedError;
import junit.framework.Test;
import org.apache.lucene.store.LockReleaseFailedException;
import org.apache.lucene.store.NativeFSLockFactory;
import org.apache.tools.ant.taskdefs.optional.junit.JUnitResultFormatter;
import org.apache.tools.ant.taskdefs.optional.junit.JUnitTest;
import org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner;
@@ -37,9 +40,11 @@ import org.apache.tools.ant.util.StringUtils;
* At this point, the output is written at once in synchronized fashion.
* This way tests can run in parallel without interleaving output.
*/
public class SolrJUnitResultFormatter implements JUnitResultFormatter {
public class LuceneJUnitResultFormatter implements JUnitResultFormatter {
private static final double ONE_SECOND = 1000.0;
private NativeFSLockFactory lockFactory;
/** Where to write the log to. */
private OutputStream out;
@@ -55,8 +60,21 @@ public class SolrJUnitResultFormatter implements JUnitResultFormatter {
/** Buffer output until the end of the test */
private StringBuilder sb;
private org.apache.lucene.store.Lock lock;
/** Constructor for LuceneJUnitResultFormatter. */
public SolrJUnitResultFormatter() {
public LuceneJUnitResultFormatter() {
File lockDir = new File(System.getProperty("java.io.tmpdir"), "lucene_junit_lock");
lockDir.mkdirs();
if(!lockDir.exists()) {
throw new RuntimeException("Could not make Lock directory:" + lockDir);
}
try {
lockFactory = new NativeFSLockFactory(lockDir);
lock = lockFactory.makeLock("junit_lock");
} catch (IOException e) {
throw new RuntimeException(e);
}
sb = new StringBuilder();
}
@@ -135,8 +153,17 @@ public class SolrJUnitResultFormatter implements JUnitResultFormatter {
if (out != null) {
try {
out.write(sb.toString().getBytes());
out.flush();
lock.obtain(5000);
try {
out.write(sb.toString().getBytes());
out.flush();
} finally {
try {
lock.release();
} catch(LockReleaseFailedException e) {
// well, let's pretend it's released anyway
}
}
} catch (IOException e) {
throw new RuntimeException("unable to write results", e);
} finally {
@@ -227,3 +254,4 @@ public class SolrJUnitResultFormatter implements JUnitResultFormatter {
sb.append(StringUtils.LINE_SEP);
}
}
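The pattern above is the heart of the change: each forked test JVM buffers its whole suite report in a StringBuilder and only writes it after taking a file-system lock, so reports from concurrently running JVMs never interleave. Below is a minimal standalone sketch of that locking idiom, using the same Lucene store calls the formatter itself makes (NativeFSLockFactory, makeLock, obtain, release); the class name and printed text are illustrative only.
import java.io.File;
import java.io.IOException;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockReleaseFailedException;
import org.apache.lucene.store.NativeFSLockFactory;
public class LockedOutputSketch {
  public static void main(String[] args) throws IOException {
    // Same lock location the formatter uses: a shared directory under java.io.tmpdir.
    File lockDir = new File(System.getProperty("java.io.tmpdir"), "lucene_junit_lock");
    lockDir.mkdirs();
    Lock lock = new NativeFSLockFactory(lockDir).makeLock("junit_lock");
    lock.obtain(5000); // wait up to 5 seconds for whichever JVM currently holds the lock
    try {
      // In the formatter this is the buffered suite report; here, just a stand-in line.
      System.out.println("only one test JVM writes its report at a time");
      System.out.flush();
    } finally {
      try {
        lock.release();
      } catch (LockReleaseFailedException e) {
        // mirror the formatter: treat the lock as released anyway
      }
    }
  }
}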

View File

@@ -102,7 +102,7 @@
The source distribution does not contain sources of the previous Lucene Java version.</echo>
</target>
<target name="test-backwards" depends="compile-core, jar-core, test-backwards-message"
<target name="compile-backwards" depends="compile-core, jar-core, test-backwards-message"
description="Runs tests of a previous Lucene version." if="backwards.available">
<sequential>
<mkdir dir="${build.dir.backwards}"/>
@@ -120,14 +120,64 @@ The source distribution does not contain sources of the previous Lucene Java ver
<compile-test-macro srcdir="${backwards.dir}/src/test" destdir="${build.dir.backwards}/classes/test"
test.classpath="backwards.test.compile.classpath" javac.source="${javac.source.backwards}" javac.target="${javac.target.backwards}"/>
<!-- run branch tests against trunk jar -->
<test-macro dataDir="${backwards.dir}/src/test"
tempDir="${build.dir.backwards}/test"
junit.classpath="backwards.junit.classpath"
junit.output.dir="${junit.output.dir.backwards}" />
</sequential>
</target>
<target name="test-backwards" depends="compile-backwards, junit-backwards-mkdir, junit-backwards-sequential, junit-backwards-parallel"/>
<target name="junit-backwards-mkdir">
<mkdir dir="${build.dir.backwards}/test"/>
</target>
<macrodef name="backwards-test-macro">
<attribute name="pattern" default=""/>
<sequential>
<!-- run branch tests against trunk jar -->
<test-macro
dataDir="${backwards.dir}/src/test"
tempDir="${build.dir.backwards}/test"
junit.classpath="backwards.junit.classpath"
junit.output.dir="${junit.output.dir.backwards}"
pattern="@{pattern}" />
</sequential>
</macrodef>
<target name="junit-backwards-sequential" if="runsequential">
<backwards-test-macro/>
</target>
<target name="junit-backwards-parallel" unless="runsequential">
<parallel threadsPerProcessor="2">
<backwards-test-macro pattern="A"/>
<backwards-test-macro pattern="B"/>
<backwards-test-macro pattern="C"/>
<backwards-test-macro pattern="D"/>
<backwards-test-macro pattern="E"/>
<backwards-test-macro pattern="F"/>
<backwards-test-macro pattern="G"/>
<backwards-test-macro pattern="H"/>
<backwards-test-macro pattern="I"/>
<backwards-test-macro pattern="J"/>
<backwards-test-macro pattern="K"/>
<backwards-test-macro pattern="L"/>
<backwards-test-macro pattern="M"/>
<backwards-test-macro pattern="N"/>
<backwards-test-macro pattern="O"/>
<backwards-test-macro pattern="P"/>
<backwards-test-macro pattern="Q"/>
<backwards-test-macro pattern="R"/>
<backwards-test-macro pattern="S"/>
<backwards-test-macro pattern="T"/>
<backwards-test-macro pattern="U"/>
<backwards-test-macro pattern="V"/>
<backwards-test-macro pattern="W"/>
<backwards-test-macro pattern="X"/>
<backwards-test-macro pattern="Y"/>
<backwards-test-macro pattern="Z"/>
</parallel>
</target>
<!-- ================================================================== -->
<!-- J A R -->
<!-- ================================================================== -->
@@ -684,24 +734,7 @@ The source distribution does not contain sources of the previous Lucene Java ver
</target>
<target name="test-contrib" depends="build-contrib">
<!-- Don't fail on error, instead check for flag file so we run
all the tests possible and can "ant generate-test-reports"
for all of them.
Because of this, we depend on "build-contrib" even though the
individual contrib "test" targets probably have the
necessary dependencies. If they fail to compile, we won't
know about it.
-->
<contrib-crawl target="test" failonerror="false"/>
<available property="contribs.failed" file="junitfailed.flag">
<filepath>
<dirset dir="${build.dir}/contrib/">
<include name="**/test/" />
</dirset>
</filepath>
</available>
<fail if="contribs.failed">Contrib tests failed!</fail>
<contrib-crawl target="test" failonerror="true"/>
</target>
<!-- Macro for building checksum files -->

View File

@@ -99,8 +99,14 @@
<property name="junit.output.dir.backwards" location="${build.dir.backwards}/test"/>
<property name="junit.reports" location="${build.dir}/test/reports"/>
<property name="junit.reports.backwards" location="${build.dir.backwards}/test/reports"/>
<property name="junit.includes" value="**/Test*.java,**/*Test.java"/>
<property name="junit.excludes" value=""/>
<property name="junit.details.formatter" value="org.apache.lucene.util.LuceneJUnitResultFormatter"/>
<condition property="runsequential">
<or>
<isset property="testcase"/>
<isset property="sequential-tests"/>
</or>
</condition>
<property name="manifest.file" location="${common.dir}/build/MANIFEST.MF"/>
@@ -387,9 +393,9 @@
<macrodef name="test-macro" description="Executes junit tests.">
<attribute name="junit.output.dir" default="${junit.output.dir}"/>
<attribute name="junit.classpath" default="junit.classpath"/>
<attribute name="dataDir"/>
<attribute name="tempDir"/>
<element name="contrib-settings" optional="yes"/>
<attribute name="dataDir" default="src/test"/>
<attribute name="tempDir" default="${build.dir}/test"/>
<attribute name="pattern" default=""/>
<sequential>
<condition property="runall">
@@ -399,7 +405,10 @@
<isset property="testpackageroot" />
</or></not>
</condition>
<mkdir dir="@{junit.output.dir}"/>
<!-- <mkdir dir="@{tempDir}/@{pattern}"/>
This is very loud and obnoxious. abuse touch instead for a "quiet" mkdir
-->
<touch file="@{tempDir}/@{pattern}/quiet.ant" verbose="false" mkdirs="true"/>
<junit printsummary="off" haltonfailure="no" maxmemory="512M"
errorProperty="tests.failed" failureProperty="tests.failed" forkmode="perBatch" dir=".">
<classpath refid="@{junit.classpath}"/>
@@ -413,56 +422,85 @@
<sysproperty key="tests.verbose" value="${tests.verbose}"/>
<!-- TODO: create propertyset for test properties, so each project can have its own set -->
<sysproperty key="tempDir" file="@{tempDir}"/>
<sysproperty key="java.io.tmpdir" file="@{tempDir}"/>
<sysproperty key="tempDir" file="@{tempDir}/@{pattern}"/>
<sysproperty key="lucene.version" value="${dev.version}"/>
<contrib-settings />
<!-- set as a system property so contrib tests can have a fixed root
to reference file paths from, and "ant test" can work from
anywhere.
-->
<sysproperty key="lucene.common.dir" file="${common.dir}" />
<!-- contrib/ant IndexTaskTest needs these two system properties -->
<sysproperty key="docs.dir" file="src/test"/>
<sysproperty key="index.dir" file="${build.dir}/test/index"/>
<!-- contrib/benchmark uses this system property to locate docs data and defined tasks -->
<sysproperty key="tasks.dir" file="${build.dir}/classes/java/org/apache/lucene/benchmark/byTask/tasks"/>
<sysproperty key="benchmark.work.dir" file="@{tempDir}/@{pattern}"/>
<formatter type="xml"/>
<formatter type="brief" usefile="false"/>
<formatter classname="${junit.details.formatter}" usefile="false"/>
<batchtest fork="yes" todir="@{junit.output.dir}" if="runall">
<fileset dir="@{dataDir}" includes="${junit.includes}" excludes="${junit.excludes}"/>
<fileset dir="@{dataDir}" includes="**/Test@{pattern}*.java,**/@{pattern}*Test.java" excludes="${junit.excludes}"/>
</batchtest>
<batchtest fork="yes" todir="@{junit.output.dir}" if="testpackage">
<fileset dir="@{dataDir}" includes="**/${testpackage}/**/Test*.java,**/${testpackage}/**/*Test.java" excludes="${junit.excludes}"/>
<fileset dir="@{dataDir}" includes="**/${testpackage}/**/Test@{pattern}*.java,**/${testpackage}/**/@{pattern}*Test.java" excludes="${junit.excludes}"/>
</batchtest>
<batchtest fork="yes" todir="@{junit.output.dir}" if="testpackageroot">
<fileset dir="@{dataDir}" includes="**/${testpackageroot}/Test*.java,**/${testpackageroot}/*Test.java" excludes="${junit.excludes}"/>
<fileset dir="@{dataDir}" includes="**/${testpackageroot}/Test@{pattern}*.java,**/${testpackageroot}/@{pattern}*Test.java" excludes="${junit.excludes}"/>
</batchtest>
<batchtest fork="yes" todir="@{junit.output.dir}" if="testcase">
<fileset dir="@{dataDir}" includes="**/${testcase}.java"/>
</batchtest>
</junit>
<!-- create this file, then if we don't fail, delete it -->
<!-- this meme makes it easy to tell if contribs have failed later -->
<echo file="@{junit.output.dir}/junitfailed.flag">MAYBE</echo>
<fail if="tests.failed">Tests failed!</fail>
<!-- life would be easier if echo had an 'if' attribute like fail -->
<delete file="@{junit.output.dir}/junitfailed.flag" />
</sequential>
</macrodef>
<target name="test" depends="compile-test" description="Runs unit tests">
<test-macro dataDir="src/test" tempDir="${build.dir}/test">
<contrib-settings>
<!-- set as a system property so contrib tests can have a fixed root
to reference file paths from, and "ant test" can work from
anywhere.
-->
<sysproperty key="lucene.common.dir" file="${common.dir}" />
<!-- contrib/ant IndexTaskTest needs these two system properties -->
<sysproperty key="docs.dir" file="src/test"/>
<sysproperty key="index.dir" file="${build.dir}/test/index"/>
<!-- contrib/benchmark uses this system property to locate docs data and defined tasks -->
<sysproperty key="tasks.dir" file="${build.dir}/classes/java/org/apache/lucene/benchmark/byTask/tasks"/>
<sysproperty key="benchmark.work.dir" file="${common.dir}/contrib/benchmark/work"/>
</contrib-settings>
</test-macro>
<target name="test" depends="compile-test,junit-mkdir,junit-sequential,junit-parallel" description="Runs unit tests"/>
<target name="junit-mkdir">
<mkdir dir="${junit.output.dir}"/>
</target>
<target name="junit-sequential" if="runsequential">
<test-macro/>
</target>
<target name="junit-parallel" unless="runsequential">
<parallel threadsPerProcessor="2">
<test-macro pattern="A"/>
<test-macro pattern="B"/>
<test-macro pattern="C"/>
<test-macro pattern="D"/>
<test-macro pattern="E"/>
<test-macro pattern="F"/>
<test-macro pattern="G"/>
<test-macro pattern="H"/>
<test-macro pattern="I"/>
<test-macro pattern="J"/>
<test-macro pattern="K"/>
<test-macro pattern="L"/>
<test-macro pattern="M"/>
<test-macro pattern="N"/>
<test-macro pattern="O"/>
<test-macro pattern="P"/>
<test-macro pattern="Q"/>
<test-macro pattern="R"/>
<test-macro pattern="S"/>
<test-macro pattern="T"/>
<test-macro pattern="U"/>
<test-macro pattern="V"/>
<test-macro pattern="W"/>
<test-macro pattern="X"/>
<test-macro pattern="Y"/>
<test-macro pattern="Z"/>
</parallel>
</target>
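In other words: unless runsequential is set (it is whenever -Dtestcase=... or the sequential-tests property is given, per the condition added above), the build fans the test sources out into 26 batches by first letter and runs them through Ant's <parallel> task with two threads per processor; each batch's <batchtest> includes only **/Test@{pattern}*.java and **/@{pattern}*Test.java. The following hypothetical helper, not part of the build, shows which batch a given test class name falls into.
public class BatchLetterSketch {
  /** Which of the A-Z batches picks up a test source file named simpleName + ".java". */
  static char batchFor(String simpleName) {
    if (simpleName.startsWith("Test") && simpleName.length() > 4) {
      return simpleName.charAt(4);   // matches Test@{pattern}*.java, e.g. TestDemo -> 'D'
    }
    if (simpleName.endsWith("Test") && simpleName.length() > 4) {
      return simpleName.charAt(0);   // matches @{pattern}*Test.java, e.g. DemoTest -> 'D'
    }
    throw new IllegalArgumentException("not matched by the junit includes: " + simpleName);
  }
  public static void main(String[] args) {
    System.out.println(batchFor("TestBackwardsCompatibility")); // B
    System.out.println(batchFor("QueryParserTest"));            // Q
  }
}
Note that a name matching both forms (for example TestSomethingTest) would actually be included by two batches; the sketch simply reports the first match.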
<!--
If you want clover test code coverage, run this before the tests. You need clover.jar and the license in your ANT classpath and you need to specify -Drun.clover=true on the command line.

View File

@@ -19,10 +19,10 @@ package org.apache.lucene.benchmark;
import java.io.File;
import junit.framework.TestCase;
import org.apache.lucene.util.LuceneTestCase;
/** Base class for all Benchmark unit tests. */
public class BenchmarkTestCase extends TestCase {
public class BenchmarkTestCase extends LuceneTestCase {
private static final File workDir;

View File

@@ -24,6 +24,7 @@ import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import org.apache.lucene.benchmark.BenchmarkTestCase;
import org.apache.lucene.benchmark.byTask.TestPerfTasksLogic;
import org.apache.lucene.benchmark.quality.Judge;
import org.apache.lucene.benchmark.quality.QualityQuery;
@@ -44,44 +45,30 @@ import org.apache.lucene.util.LuceneTestCase;
* this test will not work correctly, as it does not dynamically
* generate its test trec topics/qrels!
*/
public class TestQualityRun extends LuceneTestCase {
public class TestQualityRun extends BenchmarkTestCase {
public TestQualityRun(String name) {
super(name);
}
public void testTrecQuality() throws Exception {
// first create the partial reuters index
createReutersIndex();
File workDir = new File(System.getProperty("benchmark.work.dir","work"));
assertTrue("Bad workDir: "+workDir, workDir.exists()&& workDir.isDirectory());
int maxResults = 1000;
String docNameField = "doctitle"; // orig docID is in the linedoc format title
PrintWriter logger = VERBOSE ? new PrintWriter(System.out,true) : null;
// <tests src dir> for topics/qrels files - src/test/org/apache/lucene/benchmark/quality
File srcTestDir = new File(new File(new File(new File(new File(
new File(new File(workDir.getAbsoluteFile().getParentFile(),
"src"),"test"),"org"),"apache"),"lucene"),"benchmark"),"quality");
// prepare topics
File topicsFile = new File(srcTestDir, "trecTopics.txt");
assertTrue("Bad topicsFile: "+topicsFile, topicsFile.exists()&& topicsFile.isFile());
InputStream topics = getClass().getResourceAsStream("trecTopics.txt");
TrecTopicsReader qReader = new TrecTopicsReader();
QualityQuery qqs[] = qReader.readQueries(new BufferedReader(new FileReader(topicsFile)));
QualityQuery qqs[] = qReader.readQueries(new BufferedReader(new InputStreamReader(topics, "UTF-8")));
// prepare judge
File qrelsFile = new File(srcTestDir, "trecQRels.txt");
assertTrue("Bad qrelsFile: "+qrelsFile, qrelsFile.exists()&& qrelsFile.isFile());
Judge judge = new TrecJudge(new BufferedReader(new FileReader(qrelsFile)));
InputStream qrels = getClass().getResourceAsStream("trecQRels.txt");
Judge judge = new TrecJudge(new BufferedReader(new InputStreamReader(qrels, "UTF-8")));
// validate topics & judgments match each other
judge.validateData(qqs, logger);
IndexSearcher searcher = new IndexSearcher(FSDirectory.open(new File(workDir,"index")), true);
IndexSearcher searcher = new IndexSearcher(FSDirectory.open(new File(getWorkDir(),"index")), true);
QualityQueryParser qqParser = new SimpleQQParser("title","body");
QualityBenchmark qrun = new QualityBenchmark(qqs, qqParser, searcher, docNameField);
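The test-side changes follow the same theme: to be safe in a parallel batch, a test should read its fixtures from the classpath rather than from paths computed off a shared work directory, and should write only under the per-batch temp dir that common-build.xml now passes in (it points tempDir, java.io.tmpdir and benchmark.work.dir at @{tempDir}/@{pattern}). A hedged sketch of that convention follows; the resource must sit on the classpath next to the class, and the property fallbacks are illustrative.
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
public class ParallelSafeTestSketch {
  public static void main(String[] args) throws Exception {
    // 1. Fixture data comes off the classpath, like TestQualityRun's trecTopics.txt.
    InputStream topics = ParallelSafeTestSketch.class.getResourceAsStream("trecTopics.txt");
    if (topics == null) {
      throw new IllegalStateException("put trecTopics.txt on the classpath next to this class");
    }
    BufferedReader reader = new BufferedReader(new InputStreamReader(topics, "UTF-8"));
    System.out.println("first topic line: " + reader.readLine());
    reader.close();
    // 2. Scratch output goes under the per-batch temp dir handed in by the build.
    File scratch = new File(System.getProperty("tempDir", System.getProperty("java.io.tmpdir")));
    System.out.println("scratch dir for this batch: " + scratch);
  }
}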

View File

@@ -0,0 +1,257 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.lucene.util;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.text.NumberFormat;
import junit.framework.AssertionFailedError;
import junit.framework.Test;
import org.apache.lucene.store.LockReleaseFailedException;
import org.apache.lucene.store.NativeFSLockFactory;
import org.apache.tools.ant.taskdefs.optional.junit.JUnitResultFormatter;
import org.apache.tools.ant.taskdefs.optional.junit.JUnitTest;
import org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner;
import org.apache.tools.ant.util.FileUtils;
import org.apache.tools.ant.util.StringUtils;
/**
* Just like BriefJUnitResultFormatter "brief" bundled with ant,
* except all formatted text is buffered until the test suite is finished.
* At this point, the output is written at once in synchronized fashion.
* This way tests can run in parallel without interleaving output.
*/
public class LuceneJUnitResultFormatter implements JUnitResultFormatter {
private static final double ONE_SECOND = 1000.0;
private NativeFSLockFactory lockFactory;
/** Where to write the log to. */
private OutputStream out;
/** Formatter for timings. */
private NumberFormat numberFormat = NumberFormat.getInstance();
/** Output suite has written to System.out */
private String systemOutput = null;
/** Output suite has written to System.err */
private String systemError = null;
/** Buffer output until the end of the test */
private StringBuilder sb;
private org.apache.lucene.store.Lock lock;
/** Constructor for LuceneJUnitResultFormatter. */
public LuceneJUnitResultFormatter() {
File lockDir = new File(System.getProperty("java.io.tmpdir"), "lucene_junit_lock");
lockDir.mkdirs();
if(!lockDir.exists()) {
throw new RuntimeException("Could not make Lock directory:" + lockDir);
}
try {
lockFactory = new NativeFSLockFactory(lockDir);
lock = lockFactory.makeLock("junit_lock");
} catch (IOException e) {
throw new RuntimeException(e);
}
sb = new StringBuilder();
}
/**
* Sets the stream the formatter is supposed to write its results to.
* @param out the output stream to write to
*/
public void setOutput(OutputStream out) {
this.out = out;
}
/**
* @see JUnitResultFormatter#setSystemOutput(String)
*/
/** {@inheritDoc}. */
public void setSystemOutput(String out) {
systemOutput = out;
}
/**
* @see JUnitResultFormatter#setSystemError(String)
*/
/** {@inheritDoc}. */
public void setSystemError(String err) {
systemError = err;
}
/**
* The whole testsuite started.
* @param suite the test suite
*/
public synchronized void startTestSuite(JUnitTest suite) {
if (out == null) {
return; // Quick return - no output do nothing.
}
sb.setLength(0);
sb.append("Testsuite: ");
sb.append(suite.getName());
sb.append(StringUtils.LINE_SEP);
}
/**
* The whole testsuite ended.
* @param suite the test suite
*/
public synchronized void endTestSuite(JUnitTest suite) {
sb.append("Tests run: ");
sb.append(suite.runCount());
sb.append(", Failures: ");
sb.append(suite.failureCount());
sb.append(", Errors: ");
sb.append(suite.errorCount());
sb.append(", Time elapsed: ");
sb.append(numberFormat.format(suite.getRunTime() / ONE_SECOND));
sb.append(" sec");
sb.append(StringUtils.LINE_SEP);
sb.append(StringUtils.LINE_SEP);
// append the err and output streams to the log
if (systemOutput != null && systemOutput.length() > 0) {
sb.append("------------- Standard Output ---------------")
.append(StringUtils.LINE_SEP)
.append(systemOutput)
.append("------------- ---------------- ---------------")
.append(StringUtils.LINE_SEP);
}
if (systemError != null && systemError.length() > 0) {
sb.append("------------- Standard Error -----------------")
.append(StringUtils.LINE_SEP)
.append(systemError)
.append("------------- ---------------- ---------------")
.append(StringUtils.LINE_SEP);
}
if (out != null) {
try {
lock.obtain(5000);
try {
out.write(sb.toString().getBytes());
out.flush();
} finally {
try {
lock.release();
} catch(LockReleaseFailedException e) {
// well, let's pretend it's released anyway
}
}
} catch (IOException e) {
throw new RuntimeException("unable to write results", e);
} finally {
if (out != System.out && out != System.err) {
FileUtils.close(out);
}
}
}
}
/**
* A test started.
* @param test a test
*/
public void startTest(Test test) {
}
/**
* A test ended.
* @param test a test
*/
public void endTest(Test test) {
}
/**
* Interface TestListener for JUnit &lt;= 3.4.
*
* <p>A Test failed.
* @param test a test
* @param t the exception thrown by the test
*/
public void addFailure(Test test, Throwable t) {
formatError("\tFAILED", test, t);
}
/**
* Interface TestListener for JUnit &gt; 3.4.
*
* <p>A Test failed.
* @param test a test
* @param t the assertion failed by the test
*/
public void addFailure(Test test, AssertionFailedError t) {
addFailure(test, (Throwable) t);
}
/**
* A test caused an error.
* @param test a test
* @param error the error thrown by the test
*/
public void addError(Test test, Throwable error) {
formatError("\tCaused an ERROR", test, error);
}
/**
* Format the test for printing..
* @param test a test
* @return the formatted testname
*/
protected String formatTest(Test test) {
if (test == null) {
return "Null Test: ";
} else {
return "Testcase: " + test.toString() + ":";
}
}
/**
* Format an error and print it.
* @param type the type of error
* @param test the test that failed
* @param error the exception that the test threw
*/
protected synchronized void formatError(String type, Test test,
Throwable error) {
if (test != null) {
endTest(test);
}
sb.append(formatTest(test) + type);
sb.append(StringUtils.LINE_SEP);
sb.append(error.getMessage());
sb.append(StringUtils.LINE_SEP);
String strace = JUnitTestRunner.getFilteredTrace(error);
sb.append(strace);
sb.append(StringUtils.LINE_SEP);
sb.append(StringUtils.LINE_SEP);
}
}

View File

@@ -349,6 +349,7 @@
<pathelement location="${dest}/tests"/>
<!-- include the solrj classpath and jetty files included in example -->
<path refid="compile.classpath.solrj" />
<pathelement location="${common-solr.dir}/../lucene/build/classes/test" /> <!-- include some lucene test code -->
<pathelement path="${java.class.path}"/>
</path>

View File

@@ -103,7 +103,7 @@
<property name="junit.output.dir" location="${common-solr.dir}/${dest}/test-results"/>
<property name="junit.reports" location="${common-solr.dir}/${dest}/test-results/reports"/>
<property name="junit.formatter" value="plain"/>
<property name="junit.details.formatter" value="org.apache.solr.SolrJUnitResultFormatter"/>
<property name="junit.details.formatter" value="org.apache.lucene.util.LuceneJUnitResultFormatter"/>
<!-- Maven properties -->
<property name="maven.build.dir" value="${basedir}/build/maven"/>