mirror of https://github.com/apache/lucene.git
LUCENE-8546: Fix ant beast to fail and succeed based on whether beasting actually fails or succeeds.
LUCENE-8541: Fix ant beast to not overwrite junit xml results for each beast.iters iteration.
This commit is contained in:
parent 6e4c9b3b58
commit 16241f4484
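In short: the -beast loop now drives the -test target through ant-contrib's antcallback task, which (unlike plain antcall) copies the listed properties back into the calling project, so the loop can read tests.failed after each round (LUCENE-8546), and each round writes its JUnit XML into its own subdirectory of junit.output.dir instead of overwriting the previous round's results (LUCENE-8541). The following is a rough Groovy sketch of that pattern, simplified from the committed run-beaster.groovy shown below; variable names and the per-round logging are illustrative, and it assumes it runs inside Ant's <groovy> task (so the project, properties and task bindings exist) with antcallback already taskdef'd from ant-contrib:

import org.apache.tools.ant.BuildException
import org.apache.tools.ant.Project

int iters = (properties['beast.iters'] ?: '1') as int
def baseOutDir = properties['junit.output.dir']
def failed = false

(1..iters).each { i ->
  def outdir = baseOutDir + "/" + i       // per-round result dir (LUCENE-8541)
  new File(outdir).mkdirs()

  // antcallback (ant-contrib) behaves like antcall but returns the listed
  // properties to the caller, so the loop can detect failures (LUCENE-8546).
  def call = project.createTask('antcallback')
  call.setTarget('-test')
  call.setInheritAll(true)
  call.setInheritRefs(true)
  call.setReturn('tests.failed')
  call.createParam().with { name = 'junit.output.dir'; value = outdir }
  call.execute()

  // Ant properties are write-once, so tests.failed stays set once any round fails.
  if (project.properties['tests.failed'] as boolean) {
    failed = true
    task.log('Round ' + i + ' failed, results in ' + outdir, Project.MSG_ERR)
  }
}

if (failed) {
  throw new BuildException("Beasting Failed!")
}

With the build changes below in place, running ant beast -Dbeast.iters=N (N > 1) inside an individual module should leave one junit.output.dir/<round> directory of ANT-compatible XML per round and fail the build if any round failed.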
lucene/common-build.xml

@@ -276,6 +276,13 @@
     <isset property="fetch.sources.javadocs"/>
   </condition>

+  <target name="install-ant-contrib" unless="ant-contrib.uptodate" depends="ivy-availability-check,ivy-fail,ivy-configure">
+    <property name="ant-contrib.uptodate" value="true"/>
+    <ivy:cachepath organisation="ant-contrib" module="ant-contrib" revision="1.0b3"
+                   inline="true" conf="master" type="jar" pathid="ant-contrib.classpath"/>
+    <taskdef resource="ant-contrib.tasks" classpathref="ant-contrib.classpath"/>
+  </target>
+
   <!-- Check for minimum supported ANT version. -->
   <fail message="Minimum supported ANT version is 1.8.2. Yours: ${ant.version}">
     <condition>
@@ -1272,18 +1279,16 @@
         showSuiteSummary="false"
         timestamps="true"
       />


       <!-- Emit the information about tests timings (could be used to determine
            the slowest tests or for reuse in balancing). -->
       <junit4:report-execution-times file="${tests.cachedir}/${name}/timehints.txt" historyLength="20" />

       <!-- ANT-compatible XMLs for jenkins records etc. -->
-      <junit4:report-ant-xml dir="@{junit.output.dir}" outputStreams="no" />
+      <junit4:report-ant-xml dir="@{junit.output.dir}" outputStreams="no" ignoreDuplicateSuites="true"/>

       <!-- <junit4:report-json file="@{junit.output.dir}/tests-report-${ant.project.name}/index.html" outputStreams="no" /> -->

       <!--
       Enable if you wish to have a nice HTML5 report.
       <junit4:report-json file="@{junit.output.dir}/tests-report-${ant.project.name}/index.html" outputStreams="no" />
       -->
     </listeners>

     <!-- Input test classes. -->
@@ -1508,7 +1513,7 @@ ${tests-output}/junit4-*.suites - per-JVM executed suites
   </target>

   <target name="test" depends="clover,compile-test,patch-mrjar-classes,install-junit4-taskdef,validate,-init-totals,-test,-check-totals" description="Runs unit tests"/>
-  <target name="beast" depends="clover,compile-test,patch-mrjar-classes,install-junit4-taskdef,validate,-init-totals,-beast,-check-totals" description="Runs unit tests in a loop (-Dbeast.iters=n)"/>
+  <target name="beast" depends="install-ant-contrib,clover,compile-test,patch-mrjar-classes,install-junit4-taskdef,validate,-init-totals,-beast,-check-totals" description="Runs unit tests in a loop (-Dbeast.iters=n)"/>

   <target name="test-nocompile" depends="-clover.disable,install-junit4-taskdef,-init-totals,-test,-check-totals"
           description="Only runs unit tests. Jars are not downloaded; compilation is not updated; and Clover is not enabled."/>
@@ -1568,13 +1573,15 @@ ${tests-output}/junit4-*.suites - per-JVM executed suites
   </target>

   <!-- Beast the actual tests (must be wrapped with -init-totals, -check-totals) -->
-  <target name="-beast" depends="resolve-groovy">
+  <target name="-beast" depends="resolve-groovy,install-ant-contrib">
     <fail message="The Beast only works inside of individual modules (where 'junit.classpath' is defined)">
       <condition>
         <not><isreference refid="junit.classpath"/></not>
       </condition>
     </fail>
-    <groovy taskname="beaster" src="${common.dir}/tools/src/groovy/run-beaster.groovy"/>
+    <taskdef name="antcallback" classname="net.sf.antcontrib.logic.AntCallBack" classpathref="ant-contrib.classpath"/>
+    <groovy classpathref="ant-contrib.classpath" taskname="beaster" src="${common.dir}/tools/src/groovy/run-beaster.groovy"/>
+    <fail message="Fail baby fail" if="groovy.error"/>
   </target>

   <target name="-check-totals" if="tests.totals.toplevel" depends="resolve-groovy">
lucene/tools/src/groovy/run-beaster.groovy

@@ -23,24 +23,32 @@ import org.apache.tools.ant.BuildException;
 import org.apache.tools.ant.BuildLogger;
 import org.apache.tools.ant.Project;

+static boolean logFailOutput(Object task, String outdir) {
+  def logFile = new File(outdir, "tests-failures.txt");
+  if (logFile.exists()) {
+    logFile.eachLine("UTF-8", { line ->
+      task.log(line, Project.MSG_ERR);
+    });
+  }
+}
+
 int iters = (properties['beast.iters'] ?: '1') as int;
 if (iters <= 1) {
   throw new BuildException("Please give -Dbeast.iters with an int value > 1.");
 }

-def antcall = project.createTask('antcall');
-antcall.with {
-  target = '-test';
-  inheritAll = true;
-  inheritRefs = true;
-  createParam().with {
-    name = "tests.isbeasting";
-    value = "true";
-  };
-};
+def antcall = project.createTask('antcallback');
+
+def junitOutDir = properties["junit.output.dir"];
+
+def failed = false;

 (1..iters).each { i ->
-  task.log('Beast round: ' + i, Project.MSG_INFO);
+
+  def outdir = junitOutDir + "/" + i;
+  task.log('Beast round ' + i + " results: " + outdir, Project.MSG_INFO);

   try {
     // disable verbose build logging:
     project.buildListeners.each { listener ->
@@ -49,15 +57,46 @@ antcall.with {
       }
     };

+    new File(outdir).mkdirs();
+
+    properties["junit.output.dir"] = outdir;
+
+    antcall.setReturn("tests.failed");
+    antcall.setTarget("-test");
+    antcall.setInheritAll(true);
+    antcall.setInheritRefs(true);
+
+    antcall.with {
+
+      createParam().with {
+        name = "tests.isbeasting";
+        value = "true";
+      };
+      createParam().with {
+        name = "tests.timeoutSuite";
+        value = "900000";
+      };
+      createParam().with {
+        name = "junit.output.dir";
+        value = outdir;
+      };
+
+    };
+
+    properties["junit.output.dir"] = outdir;

     antcall.execute();

+    def antcallResult = project.properties.'tests.failed' as boolean;
+
+    if (antcallResult) {
+      failed = true;
+      logFailOutput(task, outdir)
+    }
+
   } catch (BuildException be) {
     def logFile = new File(properties["junit.output.dir"], "tests-failures.txt");
     if (logFile.exists()) {
       logFile.eachLine("UTF-8", { line ->
         task.log(line, Project.MSG_ERR);
       });
     }
     task.log(be.getMessage(), Project.MSG_ERR);
+    logFailOutput(task, outdir)
     throw be;
   } finally {
     // restore build logging (unfortunately there is no way to get the original logging level (write-only property):
@@ -68,4 +107,15 @@ antcall.with {
     };
   }
 };
-task.log('Beasting finished.', Project.MSG_INFO);
+
+// restore junit output dir
+properties["junit.output.dir"] = junitOutDir;
+
+
+if (failed) {
+  task.log('Beasting finished with failure.', Project.MSG_INFO);
+  throw new BuildException("Beasting Failed!");
+} else {
+  task.log('Beasting finished Successfully.', Project.MSG_INFO);
+}
+