// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
pipeline {
agent {
node {
label 'hbase'
}
}
triggers {
pollSCM('@daily')
}
options {
buildDiscarder(logRotator(numToKeepStr: '20'))
timeout (time: 16, unit: 'HOURS')
timestamps()
skipDefaultCheckout()
disableConcurrentBuilds()
}
environment {
YETUS_RELEASE = '0.12.0'
// Where we'll write everything from the different steps. We need a copy here so the final step can check for success/failure.
OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'
PROJECT = 'hbase'
PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
PERSONALITY_FILE = 'tools/personality.sh'
// This section of the docs tells folks not to use the javadoc @author tag; older branches have our old version of the check for said tag.
AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
WHITESPACE_IGNORE_LIST = '.*/generated/.*'
// Output from surefire; sadly the archive function in Yetus only works on file names.
ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
// These checks currently have known failures. Once they burn down to 0, remove them from here so that new problems will cause a failure.
TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite,xml'
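// Known-flaky tests to exclude, as published by the HBase-Find-Flaky-Tests job for this branch.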
EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
ASF_NIGHTLIES = 'https://nightlies.apache.org'
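// Base URL for this build's artifacts on the nightlies host; spaces in the job name
// are percent-encoded so published links resolve.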
ASF_NIGHTLIES_BASE_ORI = "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}"
ASF_NIGHTLIES_BASE = "${ASF_NIGHTLIES_BASE_ORI.replaceAll(' ', '%20')}"
}
parameters {
booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.
Should only be used manually, e.g. when verifying a fix for an issue in Yetus that we cannot work around.''')
booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
}
stages {
stage ('yetus install') {
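// Download and GPG-verify the configured Apache Yetus release (or fetch the current HEAD of
// apache/yetus when USE_YETUS_PRERELEASE is set), pull down the project personality, and stash
// both for use by the later check stages.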
steps {
sh '''#!/usr/bin/env bash
set -e
echo "Ensure we have a copy of Apache Yetus."
if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
echo "New download of Apache Yetus version ${YETUS_RELEASE}."
rm -rf "${YETUS_DIR}"
rm -rf "${WORKSPACE}/.gpg"
mkdir -p "${WORKSPACE}/.gpg"
chmod -R 700 "${WORKSPACE}/.gpg"
echo "install yetus project KEYS"
curl -L --fail -o "${WORKSPACE}/KEYS_YETUS" https://dist.apache.org/repos/dist/release/yetus/KEYS
gpg --homedir "${WORKSPACE}/.gpg" --import "${WORKSPACE}/KEYS_YETUS"
echo "download yetus release ${YETUS_RELEASE}"
curl -L --fail -O "https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
curl -L --fail -O "https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz.asc"
echo "verifying yetus release"
gpg --homedir "${WORKSPACE}/.gpg" --verify "apache-yetus-${YETUS_RELEASE}-bin.tar.gz.asc"
mv "apache-yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
else
echo "Reusing cached download of Apache Yetus version ${YETUS_RELEASE}."
fi
else
YETUS_DIR="${WORKSPACE}/yetus-git"
rm -rf "${YETUS_DIR}"
echo "downloading from github"
curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
fi
if [ ! -d "${YETUS_DIR}" ]; then
echo "unpacking yetus into '${YETUS_DIR}'"
mkdir -p "${YETUS_DIR}"
gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
fi
'''
// Set up the file we need at PERSONALITY_FILE location
dir ("tools") {
sh """#!/usr/bin/env bash
set -e
echo "Downloading Project personality."
curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
"""
}
stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
}
}
stage ('init health results') {
steps {
// Stash a placeholder under each result name we might use, so that we can unstash all of them even if
// we skip some stages due to e.g. branch-specific JDK or Hadoop support.
stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
}
}
stage ('health checks') {
parallel {
stage ('yetus general check') {
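// Runs the static/lint style Yetus checks (TESTS excludes unit and findbugs) across the JDKs
// listed in MULTIJDK, publishes the report, and writes a commentfile consumed by the final
// JIRA comment.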
agent {
node {
label 'hbase'
}
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
// TODO does hadoopcheck need to be jdk specific?
// Should be things that work with multijdk
TESTS = 'all,-unit,-findbugs'
// All JDKs tested in this stage for this branch.
// Note that JAVA_HOME still needs to be set for tests
// that don't support multijdk; JAVA_HOME also
// determines which JVM gets tested last.
MULTIJDK = '/usr/lib/jvm/zulu-8-amd64,/usr/lib/jvm/zulu-7-amd64'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
SKIP_ERRORPRONE = 'true'
ASF_NIGHTLIES_GENERAL_CHECK_BASE="${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}"
}
steps {
// Must do this before anything else, since if one of the later steps times out we'll still stash this commentfile.
sh '''#!/usr/bin/env bash
set -e
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
'''
unstash 'yetus'
dir('component') {
checkout scm
}
sh '''#!/usr/bin/env bash
set -e
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
'''
// TODO roll this into the hbase_nightly_yetus script
script {
def ret = sh(
returnStatus: true,
script: '''#!/usr/bin/env bash
set -e
declare -i status=0
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
else
echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
status=1
fi
echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
exit "${status}"
'''
)
if (ret != 0) {
// mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
// test output. See HBASE-26339 for more details.
currentBuild.result = 'UNSTABLE'
}
}
}
post {
always {
stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
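// Publish the generated *-site output to the nightlies host, then replace the local copies
// below with small redirect pages to keep the archived artifacts lean.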
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
transfers: [
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/*-site/*,${env.OUTPUT_DIR_RELATIVE}/*-site/**/*"
)
]
)
])
sh '''#!/bin/bash -e
if [ -d "${OUTPUT_DIR}/branch-site" ]; then
echo "Remove ${OUTPUT_DIR}/branch-site for saving space"
rm -rf "${OUTPUT_DIR}/branch-site"
python ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/branch-site" > "${OUTPUT_DIR}/branch-site.html"
else
echo "No branch-site, skipping"
fi
if [ -d "${OUTPUT_DIR}/patch-site" ]; then
echo "Remove ${OUTPUT_DIR}/patch-site for saving space"
rm -rf "${OUTPUT_DIR}/patch-site"
python ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/patch-site" > "${OUTPUT_DIR}/patch-site.html"
else
echo "No patch-site, skipping"
fi
'''
// Has to be relative to WORKSPACE.
archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
allowMissing: true,
keepAll: true,
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE
reportDir: "${env.OUTPUT_DIR_RELATIVE}",
reportFiles: 'console-report.html',
reportName: 'General Nightly Build Report'
]
}
}
}
stage ('yetus jdk7 checks') {
agent {
node {
label 'hbase'
}
}
when {
branch 'branch-1*'
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
// On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
SKIP_ERRORPRONE = 'true'
}
steps {
// Must do this before anything else, since if one of the later steps times out we'll still stash this commentfile.
sh '''#!/usr/bin/env bash
set -e
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
'''
unstash 'yetus'
dir('component') {
checkout scm
}
sh '''#!/usr/bin/env bash
set -e
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
'''
script {
def ret = sh(
returnStatus: true,
script: '''#!/usr/bin/env bash
set -e
declare -i status=0
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
else
echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
status=1
fi
echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
exit "${status}"
'''
)
if (ret != 0) {
// mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
// test output. See HBASE-26339 for more details.
currentBuild.result = 'UNSTABLE'
}
}
}
post {
always {
stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
sh '''#!/bin/bash -e
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
else
echo "No archived files, skipping compressing."
fi
else
echo "No archiver directory, skipping compressing."
fi
'''
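// Ship the zipped surefire logs to the nightlies host; the local zip is dropped below and
// replaced with a redirect page.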
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
transfers: [
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
)
]
)
])
// Remove the big test logs zip file; store the nightlies URL in test_logs.html instead.
sh '''#!/bin/bash -e
if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
rm -rf "${OUTPUT_DIR}/test_logs.zip"
python ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
else
echo "No test_logs.zip, skipping"
fi
'''
// Has to be relative to WORKSPACE.
archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
allowMissing : true,
keepAll : true,
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK7 Nightly Build Report'
]
}
}
}
stage ('yetus jdk8 hadoop2 checks') {
agent {
node {
label 'hbase'
}
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
// This is branch-1 specific; the branch-2+ version uses OpenJDK artifacts.
SET_JAVA_HOME = '/usr/lib/jvm/zulu-8-amd64'
}
steps {
// Must do this before anything else, since if one of the later steps times out we'll still stash this commentfile.
sh '''#!/usr/bin/env bash
set -e
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
'''
unstash 'yetus'
dir('component') {
checkout scm
}
sh '''#!/usr/bin/env bash
set -e
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
'''
script {
def ret = sh(
returnStatus: true,
script: '''#!/usr/bin/env bash
set -e
declare -i status=0
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
else
echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
status=1
fi
echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
exit "${status}"
'''
)
if (ret != 0) {
// mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
// test output. See HBASE-26339 for more details.
currentBuild.result = 'UNSTABLE'
}
}
}
post {
always {
stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
sh '''#!/bin/bash -e
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
else
echo "No archived files, skipping compressing."
fi
else
echo "No archiver directory, skipping compressing."
fi
'''
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
transfers: [
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
)
]
)
])
// Remove the big test logs zip file; store the nightlies URL in test_logs.html instead.
sh '''#!/bin/bash -e
if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
rm -rf "${OUTPUT_DIR}/test_logs.zip"
python ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
else
echo "No test_logs.zip, skipping"
fi
'''
// Has to be relative to WORKSPACE.
archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
allowMissing : true,
keepAll : true,
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK8 Nightly Build Report (Hadoop2)'
]
}
}
}
stage ('yetus jdk8 hadoop3 checks') {
agent {
node {
label 'hbase'
}
}
when {
not {
branch 'branch-1*'
}
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
// This is branch-1 specific; the branch-2+ version uses OpenJDK artifacts.
SET_JAVA_HOME = '/usr/lib/jvm/zulu-8-amd64'
// Activates the Hadoop 3.0 profile in Maven runs.
HADOOP_PROFILE = '3.0'
}
steps {
// Must do this before anything else, since if one of the later steps times out we'll still stash this commentfile.
sh '''#!/usr/bin/env bash
set -e
rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
'''
unstash 'yetus'
dir('component') {
checkout scm
}
sh '''#!/usr/bin/env bash
set -e
rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
'''
script {
def ret = sh(
returnStatus: true,
script: '''#!/usr/bin/env bash
set -e
declare -i status=0
if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
else
echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
status=1
fi
echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
exit "${status}"
'''
)
if (ret != 0) {
// mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
// test output. See HBASE-26339 for more details.
currentBuild.result = 'UNSTABLE'
}
}
}
post {
always {
stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
// zip surefire reports.
sh '''#!/bin/bash -e
if [ -d "${OUTPUT_DIR}/archiver" ]; then
count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
if [[ 0 -ne ${count} ]]; then
echo "zipping ${count} archived files"
zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
else
echo "No archived files, skipping compressing."
fi
else
echo "No archiver directory, skipping compressing."
fi
'''
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
transfers: [
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
)
]
)
])
// Remove the big test logs zip file; store the nightlies URL in test_logs.html instead.
sh '''#!/bin/bash -e
if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
rm -rf "${OUTPUT_DIR}/test_logs.zip"
python ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
else
echo "No test_logs.zip, skipping"
fi
'''
// Has to be relative to WORKSPACE.
archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/*"
archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/**/*"
publishHTML target: [
allowMissing : true,
keepAll : true,
alwaysLinkToLastBuild: true,
// Has to be relative to WORKSPACE.
reportDir : "${env.OUTPUT_DIR_RELATIVE}",
reportFiles : 'console-report.html',
reportName : 'JDK8 Nightly Build Report (Hadoop3)'
]
}
}
}
// This is meant to mimic what a release manager will do to create RCs.
// See http://hbase.apache.org/book.html#maven.release
stage ('create source tarball') {
tools {
maven 'maven_latest'
// This needs to be set to the JDK that should be used to build releases on the branch this Jenkinsfile is stored in.
jdk "jdk_1.7_latest"
}
environment {
BASEDIR = "${env.WORKSPACE}/component"
}
steps {
sh '''#!/bin/bash -e
echo "Setting up directories"
rm -rf "output-srctarball" && mkdir "output-srctarball"
rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
rm -rf ".m2-for-src" && mkdir ".m2-for-src"
echo '(x) {color:red}-1 source release artifact{color}' >output-srctarball/commentfile
echo "-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console]." >>output-srctarball/commentfile
'''
dir('component') {
checkout scm
}
sh '''#!/usr/bin/env bash
set -e
rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
"${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
echo "got the following saved stats in 'output-srctarball/machine'"
ls -lh "output-srctarball/machine"
'''
sh """#!/bin/bash -e
if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
--intermediate-file-dir output-srctarball \
--unpack-temp-dir unpacked_src_tarball \
--maven-m2-initial .m2-for-repo \
--maven-m2-src-build .m2-for-src \
--clean-source-checkout \
"${env.BASEDIR}" ; then
echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
else
echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
exit 1
fi
"""
}
post {
always {
stash name: 'srctarball-result', includes: "output-srctarball/commentfile"
sshPublisher(publishers: [
sshPublisherDesc(configName: 'Nightlies',
transfers: [
sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
sourceFiles: "output-srctarball/hbase-src.tar.gz"
)
]
)
])
// Remove the big src tarball; store the nightlies URL in hbase-src.html instead.
sh '''#!/bin/bash -e
SRC_TAR="${WORKSPACE}/output-srctarball/hbase-src.tar.gz"
if [ -f "${SRC_TAR}" ]; then
echo "Remove ${SRC_TAR} for saving space"
rm -rf "${SRC_TAR}"
python ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/output-srctarball" > "${WORKSPACE}/output-srctarball/hbase-src.html"
else
echo "No hbase-src.tar.gz, skipping"
fi
'''
archiveArtifacts 'output-srctarball/*'
archiveArtifacts 'output-srctarball/**/*'
}
}
}
}
}
}
post {
always {
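// Gather the per-stage commentfiles, build a single +1/-1 summary comment, and post it to
// every HBASE JIRA referenced in this build's changesets.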
script {
try {
unstash 'general-result'
unstash 'jdk7-result'
unstash 'hadoop2-result'
unstash 'hadoop3-result'
unstash 'srctarball-result'
sh "printenv"
def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
"${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
'output-srctarball/commentfile']
echo env.BRANCH_NAME
echo env.BUILD_URL
echo currentBuild.result
echo currentBuild.durationString
def comment = "Results for branch ${env.BRANCH_NAME}\n"
comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
comment += "(/) *{color:green}+1 overall{color}*\n"
} else {
comment += "(x) *{color:red}-1 overall{color}*\n"
// Ideally we'd get the committer out of the change and @-mention them in the per-JIRA comment.
}
comment += "----\ndetails (if available):\n\n"
echo ""
echo "[DEBUG] trying to aggregate step-wise results"
comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
echo "[INFO] Comment:"
echo comment
echo ""
echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
getJirasToComment(currentBuild).each { currentIssue ->
jiraComment issueKey: currentIssue, body: comment
}
} catch (Exception exception) {
echo "Got exception: ${exception}"
echo " ${exception.getStackTrace()}"
}
}
}
}
}
import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
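// Returns the distinct HBASE-NNNN issue keys mentioned in the commit messages of this build's changesets.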
@NonCPS
List<String> getJirasToComment(RunWrapper thisBuild) {
def seenJiras = []
thisBuild.changeSets.each { cs ->
cs.getItems().each { change ->
CharSequence msg = change.msg
echo "change: ${change}"
echo " ${msg}"
echo " ${change.commitId}"
echo " ${change.author}"
echo ""
msg.eachMatch("HBASE-[0-9]+") { currentIssue ->
echo "[DEBUG] found jira key: ${currentIssue}"
if (currentIssue in seenJiras) {
echo "[DEBUG] already commented on ${currentIssue}."
} else {
echo "[INFO] commenting on ${currentIssue}."
seenJiras << currentIssue
}
}
}
}
return seenJiras
}