/*
 * Hibernate, Relational Persistence for Idiomatic Java
 *
 * License: GNU Lesser General Public License (LGPL), version 2.1 or later.
 * See the lgpl.txt file in the root directory or <http://www.gnu.org/licenses/lgpl-2.1.html>.
 */

import groovy.transform.Field
import io.jenkins.blueocean.rest.impl.pipeline.PipelineNodeGraphVisitor
import io.jenkins.blueocean.rest.impl.pipeline.FlowNodeWrapper
import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper

/*
 * See https://github.com/hibernate/hibernate-jenkins-pipeline-helpers
 */
@Library('hibernate-jenkins-pipeline-helpers@1.5') _
import org.hibernate.jenkins.pipeline.helpers.job.JobHelper

@Field final String DEFAULT_JDK_VERSION = '11'
@Field final String DEFAULT_JDK_TOOL = "OpenJDK ${DEFAULT_JDK_VERSION} Latest"
@Field final String NODE_PATTERN_BASE = 'Worker&&Containers'
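// Populated in the Configure stage below; each entry becomes one parallel branch
// in the Build stage, keyed by BuildEnvironment.getTag().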
@Field List<BuildEnvironment> environments

this.helper = new JobHelper(this)

helper.runWithNotification {

stage('Configure') {
	this.environments = [
			// Minimum supported versions
			new BuildEnvironment( dbName: 'h2_1_4' ),
			new BuildEnvironment( dbName: 'hsqldb_2_6' ),
			new BuildEnvironment( dbName: 'derby_10_14' ),
			new BuildEnvironment( dbName: 'mysql_5_7' ),
			new BuildEnvironment( dbName: 'mariadb_10_3' ),
			new BuildEnvironment( dbName: 'postgresql_10' ),
			new BuildEnvironment( dbName: 'edb_10' ),
			new BuildEnvironment( dbName: 'oracle_11_2' ),
			new BuildEnvironment( dbName: 'db2_10_5', longRunning: true ),
			new BuildEnvironment( dbName: 'mssql_2017' ), // Unfortunately there is no SQL Server 2008 image, so we have to test with 2017
			// new BuildEnvironment( dbName: 'sybase_16' ), // There is only a Sybase ASE 16 image, so no point in testing that nightly
			new BuildEnvironment( dbName: 'sybase_jconn' ),
			// Long-running databases
			new BuildEnvironment( dbName: 'cockroachdb', node: 'cockroachdb', longRunning: true ),
			new BuildEnvironment( dbName: 'cockroachdb_22_1', node: 'cockroachdb', longRunning: true ),
			new BuildEnvironment( dbName: 'cockroachdb_23_1', node: 'cockroachdb', longRunning: true ),
			new BuildEnvironment( dbName: 'hana_cloud', dbLockableResource: 'hana-cloud', dbLockResourceAsHost: true )
	];

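	// job-configuration.yaml holds job-level settings read by the pipeline helpers;
	// see the helpers repository linked above for the expected format.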
	helper.configure {
		file 'job-configuration.yaml'
		// We don't require the following, but the build helper plugin apparently does
		jdk {
			defaultTool DEFAULT_JDK_TOOL
		}
		maven {
			defaultTool 'Apache Maven 3.8'
		}
	}
	properties([
			buildDiscarder(
					logRotator(daysToKeepStr: '30', numToKeepStr: '10')
			),
			rateLimitBuilds(throttle: [count: 1, durationName: 'day', userBoost: true]),
			// If two builds are for the same branch or pull request,
			// the older one will be aborted when the newer one starts.
			disableConcurrentBuilds(abortPrevious: true),
			helper.generateNotificationProperty()
	])
}

// Avoid running the pipeline on branch indexing
if (currentBuild.getBuildCauses().toString().contains('BranchIndexingCause')) {
	print "INFO: Build skipped due to trigger being Branch Indexing"
	currentBuild.result = 'ABORTED'
	return
}

stage('Build') {
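	// 'executions' maps each environment tag to the closure that will run as its parallel branch;
	// 'state' carries per-environment values (extra Gradle options, database container name) across stages.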
	Map<String, Closure> executions = [:]
	Map<String, Map<String, String>> state = [:]
	environments.each { BuildEnvironment buildEnv ->
		// Don't build environments for newer JDKs when this is a PR
		if ( helper.scmSource.pullRequest && buildEnv.testJdkVersion ) {
			return
		}
		state[buildEnv.tag] = [:]
		executions.put(buildEnv.tag, {
			runBuildOnNode(buildEnv.node ?: NODE_PATTERN_BASE) {
				def testJavaHome
				if ( buildEnv.testJdkVersion ) {
					testJavaHome = tool(name: "OpenJDK ${buildEnv.testJdkVersion} Latest", type: 'jdk')
				}
				def javaHome = tool(name: DEFAULT_JDK_TOOL, type: 'jdk')
				// Use withEnv instead of setting env directly, as that is global!
				// See https://github.com/jenkinsci/pipeline-plugin/blob/master/TUTORIAL.md
				withEnv(["JAVA_HOME=${javaHome}", "PATH+JAVA=${javaHome}/bin"]) {
					state[buildEnv.tag]['additionalOptions'] = ''
					if ( testJavaHome ) {
						state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
								" -Ptest.jdk.version=${buildEnv.testJdkVersion} -Porg.gradle.java.installations.paths=${javaHome},${testJavaHome}"
					}
					if ( buildEnv.testJdkLauncherArgs ) {
						state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
								" -Ptest.jdk.launcher.args=${buildEnv.testJdkLauncherArgs}"
					}
					state[buildEnv.tag]['containerName'] = null;
					stage('Checkout') {
						checkout scm
					}
					try {
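						// Each database case below pre-pulls the required image (through the CI Docker Hub
						// credentials where the pull goes through docker.withRegistry) and starts the container
						// via docker_db.sh; embedded databases (H2, HSQLDB, Derby) only pin dependency versions.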
						stage('Start database') {
							switch (buildEnv.dbName) {
								case "h2_1_4":
									state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
											" -Pgradle.libs.versions.h2=1.4.197 -Pgradle.libs.versions.h2gis=1.5.0"
									break;
								case "hsqldb_2_6":
									state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
											" -Pgradle.libs.versions.hsqldb=2.6.1"
									break;
								case "derby_10_14":
									state[buildEnv.tag]['additionalOptions'] = state[buildEnv.tag]['additionalOptions'] +
											" -Pgradle.libs.versions.derby=10.14.2.0"
									break;
								case "mysql":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('mysql:8.0.31').pull()
									}
									sh "./docker_db.sh mysql"
									state[buildEnv.tag]['containerName'] = "mysql"
									break;
								case "mysql_5_7":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('mysql:5.7.40').pull()
									}
									sh "./docker_db.sh mysql_5_7"
									state[buildEnv.tag]['containerName'] = "mysql"
									break;
								case "mariadb":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('mariadb:10.9.3').pull()
									}
									sh "./docker_db.sh mariadb"
									state[buildEnv.tag]['containerName'] = "mariadb"
									break;
								case "mariadb_10_3":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('mariadb:10.3.36').pull()
									}
									sh "./docker_db.sh mariadb_10_3"
									state[buildEnv.tag]['containerName'] = "mariadb"
									break;
								case "postgresql":
									// use the postgis image to enable the PGSQL GIS (spatial) extension
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('postgis/postgis:15-3.3').pull()
									}
									sh "./docker_db.sh postgresql"
									state[buildEnv.tag]['containerName'] = "postgres"
									break;
								case "postgresql_10":
									// use the postgis image to enable the PGSQL GIS (spatial) extension
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('postgis/postgis:10-2.5').pull()
									}
									sh "./docker_db.sh postgresql_10"
									state[buildEnv.tag]['containerName'] = "postgres"
									break;
								case "edb":
									docker.image('quay.io/enterprisedb/edb-postgres-advanced:15.2-3.3-postgis').pull()
									sh "./docker_db.sh edb"
									state[buildEnv.tag]['containerName'] = "edb"
									break;
								case "edb_10":
									docker.image('quay.io/enterprisedb/edb-postgres-advanced:10.22').pull()
									sh "./docker_db.sh edb_10"
									state[buildEnv.tag]['containerName'] = "edb"
									break;
								case "oracle":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('gvenzl/oracle-xe:21.3.0-full').pull()
									}
									sh "./docker_db.sh oracle"
									state[buildEnv.tag]['containerName'] = "oracle"
									break;
								case "oracle_11_2":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('gvenzl/oracle-xe:11.2.0.2-full').pull()
									}
									sh "./docker_db.sh oracle_11"
									state[buildEnv.tag]['containerName'] = "oracle"
									break;
								case "db2":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('ibmcom/db2:11.5.7.0').pull()
									}
									sh "./docker_db.sh db2"
									state[buildEnv.tag]['containerName'] = "db2"
									break;
								case "db2_10_5":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('ibmoms/db2express-c@sha256:a499afd9709a1f69fb41703e88def9869955234c3525547e2efc3418d1f4ca2b').pull()
									}
									sh "./docker_db.sh db2_10_5"
									state[buildEnv.tag]['containerName'] = "db2"
									break;
								case "mssql":
									docker.image('mcr.microsoft.com/mssql/server@sha256:f54a84b8a802afdfa91a954e8ddfcec9973447ce8efec519adf593b54d49bedf').pull()
									sh "./docker_db.sh mssql"
									state[buildEnv.tag]['containerName'] = "mssql"
									break;
								case "mssql_2017":
									docker.image('mcr.microsoft.com/mssql/server@sha256:7d194c54e34cb63bca083542369485c8f4141596805611e84d8c8bab2339eede').pull()
									sh "./docker_db.sh mssql_2017"
									state[buildEnv.tag]['containerName'] = "mssql"
									break;
								case "sybase":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('nguoianphu/docker-sybase').pull()
									}
									sh "./docker_db.sh sybase"
									state[buildEnv.tag]['containerName'] = "sybase"
									break;
								case "sybase_jconn":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('nguoianphu/docker-sybase').pull()
									}
									sh "./docker_db.sh sybase"
									state[buildEnv.tag]['containerName'] = "sybase"
									break;
								case "cockroachdb":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('cockroachdb/cockroach:v22.2.2').pull()
									}
									sh "./docker_db.sh cockroachdb"
									state[buildEnv.tag]['containerName'] = "cockroach"
									break;
								case "cockroachdb_22_1":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('cockroachdb/cockroach:v22.1.13').pull()
									}
									sh "./docker_db.sh cockroachdb_22_1"
									state[buildEnv.tag]['containerName'] = "cockroach"
									break;
								case "cockroachdb_23_1":
									docker.withRegistry('https://index.docker.io/v1/', 'hibernateci.hub.docker.com') {
										docker.image('cockroachdb/cockroach-unstable:v23.1.0-rc.1').pull()
									}
									sh "./docker_db.sh cockroachdb_23_1"
									state[buildEnv.tag]['containerName'] = "cockroach"
									break;
							}
						}
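						// Run the build/tests through ci/build.sh with RDBMS pointing at this environment's database.
						// Databases behind a lockable resource (e.g. HANA Cloud) are serialized via lock(); when
						// dbLockResourceAsHost is set, the locked resource name doubles as the DB host (-DdbHost).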
						stage('Test') {
							String cmd = "./ci/build.sh ${buildEnv.additionalOptions ?: ''} ${state[buildEnv.tag]['additionalOptions'] ?: ''}"
							withEnv(["RDBMS=${buildEnv.dbName}"]) {
								try {
									if (buildEnv.dbLockableResource == null) {
										withCredentials([file(credentialsId: 'sybase-jconnect-driver', variable: 'jconnect_driver')]) {
											sh 'cp -f $jconnect_driver ./drivers/jconn4.jar'
											timeout( [time: buildEnv.longRunning ? 480 : 120, unit: 'MINUTES'] ) {
												sh cmd
											}
										}
									}
									else {
										lock(label: buildEnv.dbLockableResource, quantity: 1, variable: 'LOCKED_RESOURCE') {
											if ( buildEnv.dbLockResourceAsHost ) {
												cmd += " -DdbHost=${LOCKED_RESOURCE}"
											}
											timeout( [time: buildEnv.longRunning ? 480 : 120, unit: 'MINUTES'] ) {
												sh cmd
											}
										}
									}
								}
								finally {
									junit '**/target/test-results/test/*.xml,**/target/test-results/testKitTest/*.xml'
								}
							}
						}
					}
					finally {
						if ( state[buildEnv.tag]['containerName'] != null ) {
							sh "docker rm -f ${state[buildEnv.tag]['containerName']}"
						}
						// Skip this for PRs
						if ( !env.CHANGE_ID && buildEnv.notificationRecipients != null ) {
							handleNotifications(currentBuild, buildEnv)
						}
					}
				}
			}
		})
	}
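	// Run every configured environment as its own parallel branch, named after BuildEnvironment.getTag()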
	parallel(executions)
}

} // End of helper.runWithNotification

// Job-specific helpers

class BuildEnvironment {
	String testJdkVersion
	String testJdkLauncherArgs
	String dbName = 'h2'
	String node
	String dbLockableResource
	boolean dbLockResourceAsHost
	String additionalOptions
	String notificationRecipients
	boolean longRunning

	String toString() { getTag() }
	String getTag() { "${node ? node + "_" : ''}${testJdkVersion ? 'jdk_' + testJdkVersion + '_' : '' }${dbName}" }
}
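
// Illustrative only (not part of the matrix configured above): an entry that exercises a newer
// test JDK on the default database might look like
//   new BuildEnvironment( testJdkVersion: '17', testJdkLauncherArgs: '--enable-preview' )
// testJdkVersion selects the "OpenJDK <version> Latest" JDK tool and is passed to Gradle as
// -Ptest.jdk.version; testJdkLauncherArgs is forwarded as -Ptest.jdk.launcher.args.
// Note that environments with a testJdkVersion are skipped for pull request builds.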
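
// Allocate a node matching the given label, pruning leftover Docker resources before and after
// the body runs; PR workspaces are additionally cleaned up to save disk space.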
void runBuildOnNode(String label, Closure body) {
	node( label ) {
		pruneDockerContainers()
		try {
			body()
		}
		finally {
			// If this is a PR, we clean the workspace at the end
			if ( env.CHANGE_BRANCH != null ) {
				cleanWs()
			}
			pruneDockerContainers()
		}
	}
}
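
// Best-effort cleanup of unused Docker resources; skipped entirely when the docker CLI is not
// available on the node, and '|| true' keeps individual prune failures from failing the build.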
void pruneDockerContainers() {
	if ( !sh( script: 'command -v docker || true', returnStdout: true ).trim().isEmpty() ) {
		sh 'docker container prune -f || true'
		sh 'docker image prune -f || true'
		sh 'docker network prune -f || true'
		sh 'docker volume prune -f || true'
	}
}

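// Email notifications about per-environment status changes: failure, still failing, and fixed.
// Success after success is ignored, and InterruptedBuildAction is used to detect builds that
// were cancelled (typically superseded by a newer build).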
void handleNotifications(currentBuild, buildEnv) {
	def currentResult = getParallelResult(currentBuild, buildEnv.tag)
	boolean success = currentResult == 'SUCCESS' || currentResult == 'UNKNOWN'
	def previousResult = currentBuild.previousBuild == null ? null : getParallelResult(currentBuild.previousBuild, buildEnv.tag)

	// Ignore success after success
	if ( !( success && previousResult == 'SUCCESS' ) ) {
		def subject
		def body
		if ( success ) {
			if ( previousResult != 'SUCCESS' && previousResult != null ) {
				subject = "${env.JOB_NAME} - Build ${env.BUILD_NUMBER} - Fixed"
				body = """<p>${env.JOB_NAME} - Build ${env.BUILD_NUMBER} - Fixed:</p>
					<p>Check console output at <a href='${env.BUILD_URL}'>${env.BUILD_URL}</a> to view the results.</p>"""
			}
			else {
				subject = "${env.JOB_NAME} - Build ${env.BUILD_NUMBER} - Success"
				body = """<p>${env.JOB_NAME} - Build ${env.BUILD_NUMBER} - Success:</p>
					<p>Check console output at <a href='${env.BUILD_URL}'>${env.BUILD_URL}</a> to view the results.</p>"""
			}
		}
		else if (currentBuild.rawBuild.getActions(jenkins.model.InterruptedBuildAction.class).isEmpty()) {
			// If there are interrupted build actions, this means the build was cancelled, probably superseded
			// Thanks to https://issues.jenkins.io/browse/JENKINS-43339 for the "hack" to determine this
			if ( currentResult == 'FAILURE' ) {
				if ( previousResult != null && previousResult == "FAILURE" ) {
					subject = "${env.JOB_NAME} - Build ${env.BUILD_NUMBER} - Still failing"
					body = """<p>${env.JOB_NAME} - Build ${env.BUILD_NUMBER} - Still failing:</p>
						<p>Check console output at <a href='${env.BUILD_URL}'>${env.BUILD_URL}</a> to view the results.</p>"""
				}
				else {
					subject = "${env.JOB_NAME} - Build ${env.BUILD_NUMBER} - Failure"
					body = """<p>${env.JOB_NAME} - Build ${env.BUILD_NUMBER} - Failure:</p>
						<p>Check console output at <a href='${env.BUILD_URL}'>${env.BUILD_URL}</a> to view the results.</p>"""
				}
			}
			else {
				subject = "${env.JOB_NAME} - Build ${env.BUILD_NUMBER} - ${currentResult}"
				body = """<p>${env.JOB_NAME} - Build ${env.BUILD_NUMBER} - ${currentResult}:</p>
					<p>Check console output at <a href='${env.BUILD_URL}'>${env.BUILD_URL}</a> to view the results.</p>"""
			}
		}
		else {
			// The build was cancelled (see above), probably superseded: skip the notification entirely
			return
		}

		emailext(
				subject: subject,
				body: body,
				to: buildEnv.notificationRecipients
		)
	}
}

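// Resolve the result of a single parallel branch (named after BuildEnvironment.getTag())
// by walking the Blue Ocean pipeline node graph of the given build.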
@NonCPS
String getParallelResult( RunWrapper build, String parallelBranchName ) {
	def visitor = new PipelineNodeGraphVisitor( build.rawBuild )
	def branch = visitor.pipelineNodes.find{ it.type == FlowNodeWrapper.NodeType.PARALLEL && parallelBranchName == it.displayName }
	if ( branch == null ) {
		echo "Couldn't find parallel branch name '$parallelBranchName'. Available parallel branch names:"
		visitor.pipelineNodes.findAll{ it.type == FlowNodeWrapper.NodeType.PARALLEL }.each{
			echo " - ${it.displayName}"
		}
		return null;
	}
	return branch.status.result
}