HBASE-23876 Add JDK11 compilation and unit test support to nightly job

Builds on the Dockerfile changes provided by HBASE-23767.

closes #1195

Signed-off-by: Sean Busbey <busbey@apache.org>
Author: Nick Dimiduk, 2020-02-20 16:14:23 -08:00 (committed by Nick Dimiduk)
parent 055c07c7b7
commit 4c9d14a788
2 changed files with 122 additions and 35 deletions

--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile

@@ -35,8 +35,9 @@ pipeline {
 // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
 OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
 OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
-OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
-OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
+OUTPUT_DIR_RELATIVE_JDK8_HADOOP2 = 'output-jdk8-hadoop2'
+OUTPUT_DIR_RELATIVE_JDK8_HADOOP3 = 'output-jdk8-hadoop3'
+OUTPUT_DIR_RELATIVE_JDK11_HADOOP3 = 'output-jdk11-hadoop3'
 PROJECT = 'hbase'
 PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
@@ -49,6 +50,9 @@ pipeline {
 // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
 TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
 EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/excludes"
+// TODO does hadoopcheck need to be jdk specific?
+SHALLOW_CHECKS = 'all,-shadedjars,-unit' // run by the 'yetus general check'
+DEEP_CHECKS = 'compile,htmlout,javac,maven,mvninstall,shadedjars,unit' // run by 'yetus jdkX (HadoopY) checks'
 }
 parameters {
 booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
@@ -108,7 +112,7 @@ pipeline {
 dir ("tools") {
 sh """#!/usr/bin/env bash
 set -e
-echo "Downloading Project personality."
+echo "Downloading Project personality from ${env.PROJECT_PERSONALITY}"
 curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
 """
 }
@@ -177,8 +181,9 @@ pipeline {
 // we skip some due to e.g. branch-specific JDK or Hadoop support
 stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
 stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
-stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
-stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
+stash name: 'jdk8-hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/doesn't-match"
+stash name: 'jdk8-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/doesn't-match"
+stash name: 'jdk11-hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/doesn't-match"
 stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
 }
 }
@@ -192,12 +197,8 @@ pipeline {
 }
 environment {
 BASEDIR = "${env.WORKSPACE}/component"
-// TODO does hadoopcheck need to be jdk specific?
-// Should be things that work with multijdk
-TESTS = 'all,-unit,-findbugs'
-// on branches that don't support jdk7, this will already be JAVA_HOME, so we'll end up not
-// doing multijdk there.
-MULTIJDK = '/usr/lib/jvm/java-8-openjdk-amd64'
+TESTS = "${env.SHALLOW_CHECKS}"
+SET_JAVA_HOME = '/usr/lib/jvm/java-8'
 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
 }
@@ -264,10 +265,10 @@ pipeline {
 }
 environment {
 BASEDIR = "${env.WORKSPACE}/component"
-TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
+TESTS = "${env.DEEP_CHECKS}"
 OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
 OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
-// On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
+SET_JAVA_HOME = "/usr/lib/jvm/java-7"
 }
 steps {
 // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
@@ -342,12 +343,10 @@ pipeline {
 }
 environment {
 BASEDIR = "${env.WORKSPACE}/component"
-TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
-OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
-OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
-// This isn't strictly needed on branches that only support jdk8, but doesn't hurt
-// and is needed on branches that do both jdk7 and jdk8
-SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
+TESTS = "${env.DEEP_CHECKS}"
+OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
+OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}"
+SET_JAVA_HOME = '/usr/lib/jvm/java-8'
 }
 steps {
 // Must do prior to anything else, since if one of them timesout we'll stash the commentfile
@@ -383,7 +382,7 @@ pipeline {
 }
 post {
 always {
-stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
+stash name: 'jdk8-hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
 // zip surefire reports.
 sh '''#!/bin/bash -e
@@ -427,12 +426,10 @@ pipeline {
 }
 environment {
 BASEDIR = "${env.WORKSPACE}/component"
-TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
-OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
-OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
-// This isn't strictly needed on branches that only support jdk8, but doesn't hurt
-// and is needed on branches that do both jdk7 and jdk8
-SET_JAVA_HOME = '/usr/lib/jvm/java-8-openjdk-amd64'
+TESTS = "${env.DEEP_CHECKS}"
+OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
+OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}"
+SET_JAVA_HOME = '/usr/lib/jvm/java-8'
 // Activates hadoop 3.0 profile in maven runs.
 HADOOP_PROFILE = '3.0'
 }
@@ -470,7 +467,7 @@ pipeline {
 }
 post {
 always {
-stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
+stash name: 'jdk8-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
 junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
 // zip surefire reports.
 sh '''#!/bin/bash -e
@@ -501,8 +498,96 @@ pipeline {
 }
 }
 }
+stage ('yetus jdk11 hadoop3 checks') {
+agent {
+node {
+label 'Hadoop'
+}
+}
+when {
+not {
+branch 'branch-1*'
+}
+}
+environment {
+BASEDIR = "${env.WORKSPACE}/component"
+TESTS = "${env.DEEP_CHECKS}"
+OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
+OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}"
+SET_JAVA_HOME = "/usr/lib/jvm/java-11"
+// Activates hadoop 3.0 profile in maven runs.
+HADOOP_PROFILE = '3.0'
+// ErrorProne is broken on JDK11, see HBASE-23894
+SKIP_ERROR_PRONE = 'true'
+}
+steps {
+// Must do prior to anything else, since if one of them timesout we'll stash the commentfile
+sh '''#!/usr/bin/env bash
+set -e
+rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
+echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
+echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
+'''
+unstash 'yetus'
+dir('component') {
+checkout scm
+}
+sh '''#!/usr/bin/env bash
+set -e
+rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
+"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
+echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
+ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
+'''
+sh '''#!/usr/bin/env bash
+set -e
+declare -i status=0
+if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
+echo '(/) {color:green}+1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
+else
+echo '(x) {color:red}-1 jdk11 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
+status=1
+fi
+echo "-- For more information [see jdk11 report|${BUILD_URL}/JDK11_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
+exit "${status}"
+'''
+}
+post {
+always {
+stash name: 'jdk11-hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
+junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
+// zip surefire reports.
+sh '''#!/bin/bash -e
+if [ -d "${OUTPUT_DIR}/archiver" ]; then
+count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
+if [[ 0 -ne ${count} ]]; then
+echo "zipping ${count} archived files"
+zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
+else
+echo "No archived files, skipping compressing."
+fi
+else
+echo "No archiver directory, skipping compressing."
+fi
+'''
+// Has to be relative to WORKSPACE.
+archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/*"
+archiveArtifacts artifacts: "${env.OUTPUT_DIR_RELATIVE}/**/*"
+publishHTML target: [
+allowMissing : true,
+keepAll : true,
+alwaysLinkToLastBuild: true,
+// Has to be relative to WORKSPACE.
+reportDir : "${env.OUTPUT_DIR_RELATIVE}",
+reportFiles : 'console-report.html',
+reportName : 'JDK11 Nightly Build Report'
+]
+}
+}
+}
 // This is meant to mimic what a release manager will do to create RCs.
 // See http://hbase.apache.org/book.html#maven.release
+// TODO (HBASE-23870): replace this with invocation of the release tool
 stage ('packaging and integration') {
 tools {
 maven 'Maven (latest)'
@@ -636,14 +721,16 @@ pipeline {
 try {
 unstash 'general-result'
 unstash 'jdk7-result'
-unstash 'hadoop2-result'
-unstash 'hadoop3-result'
+unstash 'jdk8-hadoop2-result'
+unstash 'jdk8-hadoop3-result'
+unstash 'jdk11-hadoop3-result'
 unstash 'srctarball-result'
 sh "printenv"
 def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
 "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
-"${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
-"${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
+"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP2}/commentfile",
+"${env.OUTPUT_DIR_RELATIVE_JDK8_HADOOP3}/commentfile",
+"${env.OUTPUT_DIR_RELATIVE_JDK11_HADOOP3}/commentfile",
 'output-srctarball/commentfile',
 'output-integration/commentfile']
 echo env.BRANCH_NAME

--- a/dev-support/hbase_nightly_yetus.sh
+++ b/dev-support/hbase_nightly_yetus.sh

@@ -35,10 +35,6 @@ if [ ${missing_env} -gt 0 ]; then
 fi
 YETUS_ARGS=()
-if [[ -n "${MULTIJDK}" ]]; then
-YETUS_ARGS=("--multijdktests=compile,javadoc" "${YETUS_ARGS[@]}")
-YETUS_ARGS=("--multijdkdirs=${MULTIJDK}" "${YETUS_ARGS[@]}")
-fi
 # If we're doing docker, make sure we don't accidentally pollute the image with a host java path
 if [ -n "${JAVA_HOME}" ]; then
@@ -85,6 +81,10 @@ if [[ -n "${HADOOP_PROFILE}" ]]; then
 YETUS_ARGS=("--hadoop-profile=${HADOOP_PROFILE}" "${YETUS_ARGS[@]}")
 fi
+if [[ -n "${SKIP_ERROR_PRONE}" ]]; then
+YETUS_ARGS=("--skip-errorprone" "${YETUS_ARGS[@]}")
+fi
 if [[ true == "${DEBUG}" ]]; then
 YETUS_ARGS=("--debug" "${YETUS_ARGS[@]}")
 fi
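
For reviewers who want to see what the new stage feeds to Yetus, here is a minimal, illustrative bash sketch (not part of this commit) that exports the JDK11 stage's settings and mirrors the two flag branches added to hbase_nightly_yetus.sh above. The values are taken straight from the diff; running the real script additionally requires the other variables its missing_env preflight check validates.

#!/usr/bin/env bash
# Illustration only: approximate the 'yetus jdk11 hadoop3 checks' environment
# and show the extra flags hbase_nightly_yetus.sh would build from it.
set -e

export SET_JAVA_HOME='/usr/lib/jvm/java-11'   # as in the new stage's environment block
export TESTS='compile,htmlout,javac,maven,mvninstall,shadedjars,unit'   # DEEP_CHECKS
export HADOOP_PROFILE='3.0'
export SKIP_ERROR_PRONE='true'                # HBASE-23894: ErrorProne is broken on JDK11

# Mirror the flag construction the script performs for these two variables.
YETUS_ARGS=()
if [[ -n "${HADOOP_PROFILE}" ]]; then
  YETUS_ARGS=("--hadoop-profile=${HADOOP_PROFILE}" "${YETUS_ARGS[@]}")
fi
if [[ -n "${SKIP_ERROR_PRONE}" ]]; then
  YETUS_ARGS=("--skip-errorprone" "${YETUS_ARGS[@]}")
fi
printf 'extra yetus args: %s\n' "${YETUS_ARGS[*]}"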