HBASE-27287 Remove jenkins files for branch-1 (#4688)
Signed-off-by: Xin Sun <ddupgs@gmail.com>
Parent: e6150c70f8
Commit: 83869291cd
@@ -1,722 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
pipeline {
  agent {
    node {
      label 'hbase'
    }
  }
  triggers {
    pollSCM('@daily')
  }
  options {
    buildDiscarder(logRotator(numToKeepStr: '20'))
    timeout (time: 16, unit: 'HOURS')
    timestamps()
    skipDefaultCheckout()
    disableConcurrentBuilds()
  }
  environment {
    YETUS_RELEASE = '0.12.0'
    // where we'll write everything from different steps. Need a copy here so the final step can check for success/failure.
    OUTPUT_DIR_RELATIVE_GENERAL = 'output-general'
    OUTPUT_DIR_RELATIVE_JDK7 = 'output-jdk7'
    OUTPUT_DIR_RELATIVE_HADOOP2 = 'output-jdk8-hadoop2'
    OUTPUT_DIR_RELATIVE_HADOOP3 = 'output-jdk8-hadoop3'

    PROJECT = 'hbase'
    PROJECT_PERSONALITY = 'https://raw.githubusercontent.com/apache/hbase/master/dev-support/hbase-personality.sh'
    PERSONALITY_FILE = 'tools/personality.sh'
    // This section of the docs tells folks not to use the javadoc tag. Older branches have our old version of the check for said tag.
    AUTHOR_IGNORE_LIST = 'src/main/asciidoc/_chapters/developer.adoc,dev-support/test-patch.sh'
    WHITESPACE_IGNORE_LIST = '.*/generated/.*'
    // output from surefire; sadly the archive function in yetus only works on file names.
    ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
    // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
    TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite,xml'
    EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/excludes"
    ASF_NIGHTLIES = 'https://nightlies.apache.org'
    ASF_NIGHTLIES_BASE_ORI = "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}"
    ASF_NIGHTLIES_BASE = "${ASF_NIGHTLIES_BASE_ORI.replaceAll(' ', '%20')}"
  }
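  // A note on EXCLUDE_TESTS_URL: the nightly wrapper script invoked in the stages below
  // (dev-support/hbase_nightly_yetus.sh) is what consumes this value, presumably feeding it
  // to Yetus so tests currently tracked as flaky by HBase-Find-Flaky-Tests are skipped.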
  parameters {
    booleanParam(name: 'USE_YETUS_PRERELEASE', defaultValue: false, description: '''Check to use the current HEAD of apache/yetus rather than our configured release.

      Should only be used manually when e.g. there is some non-work-aroundable issue in yetus we are checking a fix for.''')
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
  stages {
    stage ('yetus install') {
      steps {
        sh '''#!/usr/bin/env bash
          set -e
          echo "Ensure we have a copy of Apache Yetus."
          if [[ true != "${USE_YETUS_PRERELEASE}" ]]; then
            YETUS_DIR="${WORKSPACE}/yetus-${YETUS_RELEASE}"
            echo "Checking for Yetus ${YETUS_RELEASE} in '${YETUS_DIR}'"
            if ! "${YETUS_DIR}/bin/test-patch" --version >/dev/null 2>&1 ; then
              echo "New download of Apache Yetus version ${YETUS_RELEASE}."
              rm -rf "${YETUS_DIR}"
              rm -rf "${WORKSPACE}/.gpg"
              mkdir -p "${WORKSPACE}/.gpg"
              chmod -R 700 "${WORKSPACE}/.gpg"

              echo "install yetus project KEYS"
              curl -L --fail -o "${WORKSPACE}/KEYS_YETUS" https://dist.apache.org/repos/dist/release/yetus/KEYS
              gpg --homedir "${WORKSPACE}/.gpg" --import "${WORKSPACE}/KEYS_YETUS"

              echo "download yetus release ${YETUS_RELEASE}"
              curl -L --fail -O "https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz"
              curl -L --fail -O "https://dist.apache.org/repos/dist/release/yetus/${YETUS_RELEASE}/apache-yetus-${YETUS_RELEASE}-bin.tar.gz.asc"
              echo "verifying yetus release"
              gpg --homedir "${WORKSPACE}/.gpg" --verify "apache-yetus-${YETUS_RELEASE}-bin.tar.gz.asc"
              mv "apache-yetus-${YETUS_RELEASE}-bin.tar.gz" yetus.tar.gz
            else
              echo "Reusing cached download of Apache Yetus version ${YETUS_RELEASE}."
            fi
          else
            YETUS_DIR="${WORKSPACE}/yetus-git"
            rm -rf "${YETUS_DIR}"
            echo "downloading from github"
            curl -L --fail https://api.github.com/repos/apache/yetus/tarball/HEAD -o yetus.tar.gz
          fi
          if [ ! -d "${YETUS_DIR}" ]; then
            echo "unpacking yetus into '${YETUS_DIR}'"
            mkdir -p "${YETUS_DIR}"
            gunzip -c yetus.tar.gz | tar xpf - -C "${YETUS_DIR}" --strip-components 1
          fi
        '''
        // Set up the file we need at PERSONALITY_FILE location
        dir ("tools") {
          sh """#!/usr/bin/env bash
            set -e
            echo "Downloading Project personality."
            curl -L -o personality.sh "${env.PROJECT_PERSONALITY}"
          """
        }
        stash name: 'yetus', includes: "yetus-*/*,yetus-*/**/*,tools/personality.sh"
      }
    }
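    // Note: the parallel check stages below each run on their own 'hbase'-labeled node with a
    // fresh workspace, so the Yetus install and personality file travel to those nodes via
    // this 'yetus' stash; each yetus check stage starts with `unstash 'yetus'`.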
    stage ('init health results') {
      steps {
        // stash with given name for all tests we might run, so that we can unstash all of them even if
        // we skip some due to e.g. branch-specific JDK or Hadoop support
        stash name: 'general-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_GENERAL}/doesn't-match"
        stash name: 'jdk7-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_JDK7}/doesn't-match"
        stash name: 'hadoop2-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP2}/doesn't-match"
        stash name: 'hadoop3-result', allowEmpty: true, includes: "${OUTPUT_DIR_RELATIVE_HADOOP3}/doesn't-match"
        stash name: 'srctarball-result', allowEmpty: true, includes: "output-srctarball/doesn't-match"
      }
    }
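    // The "doesn't-match" include is deliberate: combined with allowEmpty it creates an empty
    // stash under each name, so the final post block can unstash every result name
    // unconditionally without failing for stages that never ran on this branch.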
    stage ('health checks') {
      parallel {
        stage ('yetus general check') {
          agent {
            node {
              label 'hbase'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            // TODO does hadoopcheck need to be jdk specific?
            // Should be things that work with multijdk
            TESTS = 'all,-unit,-findbugs'
            // all jdks tested on this stage for this branch
            // note that JAVA_HOME still needs to be set for tests
            // that don't support multijdk. JAVA_HOME will also
            // determine which jvm will be tested last.
            MULTIJDK = '/usr/lib/jvm/zulu-8-amd64,/usr/lib/jvm/zulu-7-amd64'
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_GENERAL}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_GENERAL}"
            SKIP_ERRORPRONE = 'true'
            ASF_NIGHTLIES_GENERAL_CHECK_BASE = "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}"
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 general checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            // TODO roll this into the hbase_nightly_yetus script
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 general checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see general report|${BUILD_URL}/General_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              stash name: 'general-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/*-site/*,${env.OUTPUT_DIR_RELATIVE}/*-site/**/*"
                    )
                  ]
                )
              ])
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/branch-site" ]; then
                  echo "Remove ${OUTPUT_DIR}/branch-site for saving space"
                  rm -rf "${OUTPUT_DIR}/branch-site"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/branch-site" > "${OUTPUT_DIR}/branch-site.html"
                else
                  echo "No branch-site, skipping"
                fi
                if [ -d "${OUTPUT_DIR}/patch-site" ]; then
                  echo "Remove ${OUTPUT_DIR}/patch-site for saving space"
                  rm -rf "${OUTPUT_DIR}/patch-site"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/patch-site" > "${OUTPUT_DIR}/patch-site.html"
                else
                  echo "No patch-site, skipping"
                fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing: true,
                keepAll: true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE
                reportDir: "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles: 'console-report.html',
                reportName: 'General Nightly Build Report'
              ]
            }
          }
        }
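        // The commentfile written by each stage is JIRA wiki markup; after aggregation in the
        // final post block it renders on the issue as, e.g.:
        //   (/) {color:green}+1 general checks{color}
        //   -- For more information [see general report|<BUILD_URL>/General_Nightly_Build_Report/]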
        stage ('yetus jdk7 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
          when {
            branch 'branch-1*'
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_JDK7}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_JDK7}"
            // On branches where we do jdk7 checks, jdk7 will be JAVA_HOME already.
            SKIP_ERRORPRONE = 'true'
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk7 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk7 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk7 report|${BUILD_URL}/JDK7_Nightly_Build_Report/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              stash name: 'jdk7-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK7 Nightly Build Report'
              ]
            }
          }
        }
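        // Branch gating: the jdk7 stage above runs only on branch-1 lines (when { branch 'branch-1*' }),
        // while the hadoop3 stage below is the mirror image (when { not { branch 'branch-1*' } });
        // the jdk8/hadoop2 stage runs on every branch.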
        stage ('yetus jdk8 hadoop2 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = 'maven,mvninstall,compile,javac,unit,findbugs,htmlout'
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP2}"
            // this is branch-1 specific. the branch-2+ version uses openjdk artifacts
            SET_JAVA_HOME = '/usr/lib/jvm/zulu-8-amd64'
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk8 hadoop2 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk8 (hadoop2) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop2)/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              stash name: 'hadoop2-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK8 Nightly Build Report (Hadoop2)'
              ]
            }
          }
        }
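        // Publishing pattern used throughout: bulky outputs (test_logs.zip, site dirs) are
        // shipped to nightlies.apache.org via sshPublisher, deleted locally, and replaced by a
        // small gen_redirect_html.py pointer page, so artifacts archived on the Jenkins master
        // stay small while the full logs remain reachable from the build page.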
        stage ('yetus jdk8 hadoop3 checks') {
          agent {
            node {
              label 'hbase'
            }
          }
          when {
            not {
              branch 'branch-1*'
            }
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
            TESTS = 'maven,mvninstall,compile,javac,unit,htmlout'
            OUTPUT_DIR_RELATIVE = "${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
            OUTPUT_DIR = "${env.WORKSPACE}/${env.OUTPUT_DIR_RELATIVE_HADOOP3}"
            // this is branch-1 specific. the branch-2+ version uses openjdk artifacts
            SET_JAVA_HOME = '/usr/lib/jvm/zulu-8-amd64'
            // Activates hadoop 3.0 profile in maven runs.
            HADOOP_PROFILE = '3.0'
          }
          steps {
            // Must do prior to anything else, since if one of them times out we'll stash the commentfile
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}" && mkdir "${OUTPUT_DIR}"
              echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' >"${OUTPUT_DIR}/commentfile"
              echo "-- Something went wrong running this stage, please [check relevant console output|${BUILD_URL}/console]." >> "${OUTPUT_DIR}/commentfile"
            '''
            unstash 'yetus'
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "${OUTPUT_DIR}/machine" && mkdir "${OUTPUT_DIR}/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "${OUTPUT_DIR_RELATIVE}/machine"
              echo "got the following saved stats in '${OUTPUT_DIR_RELATIVE}/machine'"
              ls -lh "${OUTPUT_DIR_RELATIVE}/machine"
            '''
            script {
              def ret = sh(
                returnStatus: true,
                script: '''#!/usr/bin/env bash
                  set -e
                  declare -i status=0
                  if "${BASEDIR}/dev-support/hbase_nightly_yetus.sh" ; then
                    echo '(/) {color:green}+1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                  else
                    echo '(x) {color:red}-1 jdk8 hadoop3 checks{color}' > "${OUTPUT_DIR}/commentfile"
                    status=1
                  fi
                  echo "-- For more information [see jdk8 (hadoop3) report|${BUILD_URL}/JDK8_Nightly_Build_Report_(Hadoop3)/]" >> "${OUTPUT_DIR}/commentfile"
                  exit "${status}"
                '''
              )
              if (ret != 0) {
                // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
                // test output. See HBASE-26339 for more details.
                currentBuild.result = 'UNSTABLE'
              }
            }
          }
          post {
            always {
              stash name: 'hadoop3-result', includes: "${OUTPUT_DIR_RELATIVE}/commentfile"
              junit testResults: "${env.OUTPUT_DIR_RELATIVE}/**/target/**/TEST-*.xml", allowEmptyResults: true
              // zip surefire reports.
              sh '''#!/bin/bash -e
                if [ -d "${OUTPUT_DIR}/archiver" ]; then
                  count=$(find "${OUTPUT_DIR}/archiver" -type f | wc -l)
                  if [[ 0 -ne ${count} ]]; then
                    echo "zipping ${count} archived files"
                    zip -q -m -r "${OUTPUT_DIR}/test_logs.zip" "${OUTPUT_DIR}/archiver"
                  else
                    echo "No archived files, skipping compressing."
                  fi
                else
                  echo "No archiver directory, skipping compressing."
                fi
              '''
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "${env.OUTPUT_DIR_RELATIVE}/test_logs.zip"
                    )
                  ]
                )
              ])
              // remove the big test logs zip file, store the nightlies url in test_logs.html
              sh '''#!/bin/bash -e
                if [ -f "${OUTPUT_DIR}/test_logs.zip" ]; then
                  echo "Remove ${OUTPUT_DIR}/test_logs.zip for saving space"
                  rm -rf "${OUTPUT_DIR}/test_logs.zip"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/${OUTPUT_DIR_RELATIVE}" > "${OUTPUT_DIR}/test_logs.html"
                else
                  echo "No test_logs.zip, skipping"
                fi
              '''
              // Has to be relative to WORKSPACE.
              archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/*"
              archiveArtifacts "${env.OUTPUT_DIR_RELATIVE}/**/*"
              publishHTML target: [
                allowMissing         : true,
                keepAll              : true,
                alwaysLinkToLastBuild: true,
                // Has to be relative to WORKSPACE.
                reportDir            : "${env.OUTPUT_DIR_RELATIVE}",
                reportFiles          : 'console-report.html',
                reportName           : 'JDK8 Nightly Build Report (Hadoop3)'
              ]
            }
          }
        }
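        // HADOOP_PROFILE is read by the personality / nightly wrapper scripts; presumably it
        // surfaces in the maven invocations as -Dhadoop.profile=3.0 so the build and tests run
        // against the Hadoop 3 dependency set.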
        // This is meant to mimic what a release manager will do to create RCs.
        // See http://hbase.apache.org/book.html#maven.release
        stage ('create source tarball') {
          agent {
            node {
              label 'hbase-large'
            }
          }
          tools {
            maven 'maven_latest'
            // this needs to be set to the jdk that ought to be used to build releases on the branch the Jenkinsfile is stored in.
            jdk "jdk_1.7_latest"
          }
          environment {
            BASEDIR = "${env.WORKSPACE}/component"
          }
          steps {
            dir('component') {
              checkout scm
            }
            sh '''#!/bin/bash -e
              echo "Setting up directories"
              rm -rf "output-srctarball" && mkdir "output-srctarball"
              rm -rf "unpacked_src_tarball" && mkdir "unpacked_src_tarball"
              rm -rf ".m2-for-repo" && mkdir ".m2-for-repo"
              rm -rf ".m2-for-src" && mkdir ".m2-for-src"
              echo '(x) {color:red}-1 source release artifact{color}\n-- Something went wrong with this stage, [check relevant console output|${BUILD_URL}/console].' >output-srctarball/commentfile
            '''
            dir('component') {
              checkout scm
            }
            sh '''#!/usr/bin/env bash
              set -e
              rm -rf "output-srctarball/machine" && mkdir "output-srctarball/machine"
              "${BASEDIR}/dev-support/gather_machine_environment.sh" "output-srctarball/machine"
              echo "got the following saved stats in 'output-srctarball/machine'"
              ls -lh "output-srctarball/machine"
            '''
            sh """#!/bin/bash -e
              if "${env.BASEDIR}/dev-support/hbase_nightly_source-artifact.sh" \
                  --intermediate-file-dir output-srctarball \
                  --unpack-temp-dir unpacked_src_tarball \
                  --maven-m2-initial .m2-for-repo \
                  --maven-m2-src-build .m2-for-src \
                  --clean-source-checkout \
                  "${env.BASEDIR}" ; then
                echo '(/) {color:green}+1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
              else
                echo '(x) {color:red}-1 source release artifact{color}\n-- See build output for details.' >output-srctarball/commentfile
                exit 1
              fi
            """
          }
          post {
            always {
              stash name: 'srctarball-result', includes: "output-srctarball/commentfile"
              sshPublisher(publishers: [
                sshPublisherDesc(configName: 'Nightlies',
                  transfers: [
                    sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                      sourceFiles: "output-srctarball/hbase-src.tar.gz"
                    )
                  ]
                )
              ])
              // remove the big src tarball, store the nightlies url in hbase-src.html
              sh '''#!/bin/bash -e
                SRC_TAR="${WORKSPACE}/output-srctarball/hbase-src.tar.gz"
                if [ -f "${SRC_TAR}" ]; then
                  echo "Remove ${SRC_TAR} for saving space"
                  rm -rf "${SRC_TAR}"
                  python2 ${BASEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES_BASE}/output-srctarball" > "${WORKSPACE}/output-srctarball/hbase-src.html"
                else
                  echo "No hbase-src.tar.gz, skipping"
                fi
              '''
              archiveArtifacts 'output-srctarball/*'
              archiveArtifacts 'output-srctarball/**/*'
            }
          }
        }
      }
    }
  }
  post {
    always {
      script {
        try {
          unstash 'general-result'
          unstash 'jdk7-result'
          unstash 'hadoop2-result'
          unstash 'hadoop3-result'
          unstash 'srctarball-result'
          sh "printenv"
          def results = ["${env.OUTPUT_DIR_RELATIVE_GENERAL}/commentfile",
                         "${env.OUTPUT_DIR_RELATIVE_JDK7}/commentfile",
                         "${env.OUTPUT_DIR_RELATIVE_HADOOP2}/commentfile",
                         "${env.OUTPUT_DIR_RELATIVE_HADOOP3}/commentfile",
                         'output-srctarball/commentfile']
          echo env.BRANCH_NAME
          echo env.BUILD_URL
          echo currentBuild.result
          echo currentBuild.durationString
          def comment = "Results for branch ${env.BRANCH_NAME}\n"
          comment += "\t[build ${currentBuild.displayName} on builds.a.o|${env.BUILD_URL}]: "
          if (currentBuild.result == null || currentBuild.result == "SUCCESS") {
            comment += "(/) *{color:green}+1 overall{color}*\n"
          } else {
            comment += "(x) *{color:red}-1 overall{color}*\n"
            // Ideally get the committer out of the change and @ mention them in the per-jira comment
          }
          comment += "----\ndetails (if available):\n\n"
          echo ""
          echo "[DEBUG] trying to aggregate step-wise results"
          comment += results.collect { fileExists(file: it) ? readFile(file: it) : "" }.join("\n\n")
          echo "[INFO] Comment:"
          echo comment
          echo ""
          echo "[INFO] There are ${currentBuild.changeSets.size()} change sets."
          getJirasToComment(currentBuild).each { currentIssue ->
            jiraComment issueKey: currentIssue, body: comment
          }
        } catch (Exception exception) {
          echo "Got exception: ${exception}"
          echo "  ${exception.getStackTrace()}"
        }
      }
    }
  }
}

import org.jenkinsci.plugins.workflow.support.steps.build.RunWrapper
@NonCPS
List<String> getJirasToComment(RunWrapper thisBuild) {
  def seenJiras = []
  thisBuild.changeSets.each { cs ->
    cs.getItems().each { change ->
      CharSequence msg = change.msg
      echo "change: ${change}"
      echo "  ${msg}"
      echo "  ${change.commitId}"
      echo "  ${change.author}"
      echo ""
      msg.eachMatch("HBASE-[0-9]+") { currentIssue ->
        echo "[DEBUG] found jira key: ${currentIssue}"
        if (currentIssue in seenJiras) {
          echo "[DEBUG] already commented on ${currentIssue}."
        } else {
          echo "[INFO] commenting on ${currentIssue}."
          seenJiras << currentIssue
        }
      }
    }
  }
  return seenJiras
}
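// A minimal sketch of the key extraction done above, runnable in plain Groovy
// (the message text here is illustrative, not part of the pipeline):
//   def msg = 'HBASE-27287 Remove jenkins files for branch-1 (#4688)'
//   def keys = []
//   msg.eachMatch('HBASE-[0-9]+') { keys << it }
//   assert keys == ['HBASE-27287']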
@@ -1,226 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

pipeline {

  agent {
    label 'hbase'
  }

  options {
    // N.B. this is per-branch, which means per PR
    disableConcurrentBuilds()
    buildDiscarder(logRotator(numToKeepStr: '15'))
    timeout (time: 7, unit: 'HOURS')
    timestamps()
    checkoutToSubdirectory('src')
  }

  environment {
    SOURCEDIR = 'src'
    // will also need to change notification section below
    PATCHDIR = 'out'
    DOCKERFILE = "${SOURCEDIR}/dev-support/docker/Dockerfile"
    YETUS = 'yetus'
    // Branch or tag name.  Yetus release tags are 'rel/X.Y.Z'
    YETUS_VERSION = 'rel/0.12.0'
  }

  parameters {
    booleanParam(name: 'DEBUG',
      defaultValue: false,
      description: 'Print extra outputs for debugging the jenkins job and yetus')
  }

  stages {
    stage ('install yetus') {
      steps {
        dir("${WORKSPACE}/${YETUS}") {
          checkout([
            $class: 'GitSCM',
            branches: [[name: "${env.YETUS_VERSION}"]],
            userRemoteConfigs: [[ url: 'https://github.com/apache/yetus.git']]]
          )
        }
      }
    }

    stage ('precommit-run') {
      steps {
        withCredentials([
          usernamePassword(
            credentialsId: 'apache-hbase-at-github.com',
            passwordVariable: 'GITHUB_PASSWORD',
            usernameVariable: 'GITHUB_USER'
          )]) {
          sh '''#!/usr/bin/env bash
            set -e
            TESTPATCHBIN="${WORKSPACE}/${YETUS}/precommit/src/main/shell/test-patch.sh"
            # this must be clean for every run
            if [[ -d "${WORKSPACE}/${PATCHDIR}" ]]; then
              rm -rf "${WORKSPACE}/${PATCHDIR}"
            fi
            mkdir -p "${WORKSPACE}/${PATCHDIR}"

            ## Checking on H* machine nonsense
            echo "JAVA_HOME: ${JAVA_HOME}"
            ls -l "${JAVA_HOME}" || true
            echo "MAVEN_HOME: ${MAVEN_HOME}"
            echo "maven version:"
            mvn --offline --version || true
            echo "getting machine specs, find in ${BUILD_URL}/artifact/patchprocess/machine/"
            mkdir "${PATCHDIR}/machine"
            cat /proc/cpuinfo >"${PATCHDIR}/machine/cpuinfo" 2>&1 || true
            cat /proc/meminfo >"${PATCHDIR}/machine/meminfo" 2>&1 || true
            cat /proc/diskstats >"${PATCHDIR}/machine/diskstats" 2>&1 || true
            cat /sys/block/sda/stat >"${PATCHDIR}/machine/sys-block-sda-stat" 2>&1 || true
            df -h >"${PATCHDIR}/machine/df-h" 2>&1 || true
            ps -Awwf >"${PATCHDIR}/machine/ps-Awwf" 2>&1 || true
            ifconfig -a >"${PATCHDIR}/machine/ifconfig-a" 2>&1 || true
            lsblk -ta >"${PATCHDIR}/machine/lsblk-ta" 2>&1 || true
            lsblk -fa >"${PATCHDIR}/machine/lsblk-fa" 2>&1 || true
            cat /proc/loadavg >"${PATCHDIR}/machine/loadavg" 2>&1 || true
            ulimit -a >"${PATCHDIR}/machine/ulimit-a" 2>&1 || true
            ## /H*

            # If CHANGE_URL is set (e.g., Github Branch Source plugin), process it.
            # Otherwise exit, because we don't want HBase to do a
            # full build.  We wouldn't normally do this check for smaller
            # projects. :)
            if [[ -z "${CHANGE_URL}" ]]; then
              echo "Full build skipped" > "${WORKSPACE}/${PATCHDIR}/report.html"
              exit 0
            fi
            # enable debug output for yetus
            if [[ "true" = "${DEBUG}" ]]; then
              YETUS_ARGS+=("--debug")
            fi
            # If we're doing docker, make sure we don't accidentally pollute the image with a host java path
            if [ -n "${JAVA_HOME}" ]; then
              unset JAVA_HOME
            fi
            YETUS_ARGS+=("--patch-dir=${WORKSPACE}/${PATCHDIR}")
            # where the source is located
            YETUS_ARGS+=("--basedir=${WORKSPACE}/${SOURCEDIR}")
            # our project defaults come from a personality file
            # which will get loaded automatically by setting the project name
            YETUS_ARGS+=("--project=hbase")
            # lots of different output formats
            YETUS_ARGS+=("--brief-report-file=${WORKSPACE}/${PATCHDIR}/brief.txt")
            YETUS_ARGS+=("--console-report-file=${WORKSPACE}/${PATCHDIR}/console.txt")
            YETUS_ARGS+=("--html-report-file=${WORKSPACE}/${PATCHDIR}/report.html")
            # enable writing back to Github
            YETUS_ARGS+=(--github-password="${GITHUB_PASSWORD}")
            YETUS_ARGS+=(--github-user=${GITHUB_USER})
            # enable writing back to ASF JIRA
            YETUS_ARGS+=(--jira-password="${JIRA_PASSWORD}")
            YETUS_ARGS+=(--jira-user="${JIRA_USER}")
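            # (Note: only the GitHub credentials are bound by the withCredentials block above;
            # JIRA_USER and JIRA_PASSWORD are expected to arrive via the job's environment.)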
            # auto-kill any surefire stragglers during unit test runs
            YETUS_ARGS+=("--reapermode=kill")
            YETUS_ARGS+=("--multijdktests=compile")
            # set relatively high limits for ASF machines
            # changing these to higher values may cause problems
            # with other jobs on systemd-enabled machines
            YETUS_ARGS+=("--proclimit=10000")
            YETUS_ARGS+=("--dockermemlimit=20g")
            # -1 findbugs issues that show up prior to the patch being applied
            YETUS_ARGS+=("--findbugs-strict-precheck")
            # rsync these files back into the archive dir
            YETUS_ARGS+=("--archive-list=rat.txt")
            # URL for user-side presentation in reports and such to our artifacts
            # (needs to match the archive bits below)
            YETUS_ARGS+=("--build-url-artifacts=artifact/out")
            # plugins to enable
            YETUS_ARGS+=("--plugins=all")
            # don't let these tests cause -1s because we aren't really paying that
            # much attention to them
            YETUS_ARGS+=("--tests-filter=ruby-lint,test4tests")
            # run in docker mode and specifically point to our
            # Dockerfile since we don't want to use the auto-pulled version.
            YETUS_ARGS+=("--docker")
            YETUS_ARGS+=("--dockerfile=${DOCKERFILE}")
            YETUS_ARGS+=("--mvn-custom-repos")
            # Branch-1 specific, look at the Zulu jdk8
            YETUS_ARGS+=("--multijdkdirs=/usr/lib/jvm/zulu-8-amd64,/usr/lib/jvm/zulu-7-amd64")
            YETUS_ARGS+=("--findbugs-home=/usr")
            YETUS_ARGS+=("--whitespace-eol-ignore-list=.*/generated/.*")
            YETUS_ARGS+=("--whitespace-tabs-ignore-list=.*/generated/.*")
            YETUS_ARGS+=("--personality=${SOURCEDIR}/dev-support/hbase-personality.sh")
            YETUS_ARGS+=("--quick-hadoopcheck")
            YETUS_ARGS+=("--skip-errorprone")
            # effectively treat dev-support as a custom maven module
            YETUS_ARGS+=("--skip-dirs=dev-support")
            # help keep the ASF boxes clean
            YETUS_ARGS+=("--sentinel")
            # use emoji vote so it is easier to find the broken line
            YETUS_ARGS+=("--github-use-emoji-vote")
            "${TESTPATCHBIN}" "${YETUS_ARGS[@]}"
          '''
        }
      }
    }
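    // Net effect of the stage above (illustrative, flags abridged): a dockerized Yetus run
    // roughly equivalent to
    //   test-patch.sh --docker --dockerfile=src/dev-support/docker/Dockerfile --project=hbase \
    //     --basedir="${WORKSPACE}/src" --patch-dir="${WORKSPACE}/out" --plugins=all ...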

  }

  post {
    always {
      script {
        // Yetus output
        archiveArtifacts "${env.PATCHDIR}/**"
        // Publish the HTML report so that it can be looked at
        // Has to be relative to WORKSPACE.
        publishHTML (target: [
          allowMissing: true,
          keepAll: true,
          alwaysLinkToLastBuild: true,
          // Has to be relative to WORKSPACE
          reportDir: "${env.PATCHDIR}",
          reportFiles: 'report.html',
          reportName: 'Yetus Report'
        ])
        // Publish JUnit results
        try {
          junit "${env.SOURCEDIR}/**/target/surefire-reports/*.xml",
            allowEmptyResults: true, skipPublishingChecks: true
        } catch(e) {
          echo 'junit processing: ' + e.toString()
        }
      }
    }

    // Jenkins pipeline jobs fill slaves on PRs without this :(
    cleanup() {
      script {
        sh '''
          # See YETUS-764
          if [ -f "${WORKSPACE}/${PATCHDIR}/pidfile.txt" ]; then
            echo "test-patch process appears to still be running: killing"
            kill `cat "${WORKSPACE}/${PATCHDIR}/pidfile.txt"` || true
            sleep 10
          fi
          if [ -f "${WORKSPACE}/${PATCHDIR}/cidfile.txt" ]; then
            echo "test-patch container appears to still be running: killing"
            docker kill `cat "${WORKSPACE}/${PATCHDIR}/cidfile.txt"` || true
          fi
          # See HADOOP-13951
          chmod -R u+rxw "${WORKSPACE}"
        '''
        deleteDir()
      }
    }
  }
}
@@ -1,75 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
pipeline {
  agent {
    node {
      label 'hbase'
    }
  }
  triggers {
    cron('@daily')
  }
  options {
    buildDiscarder(logRotator(numToKeepStr: '50'))
    timeout (time: 15, unit: 'MINUTES')
    timestamps()
  }
  environment {
    ASF_NIGHTLIES = 'https://nightlies.apache.org'
  }
  parameters {
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
  stages {
    stage ('build flaky report') {
      steps {
        sh '''#!/usr/bin/env bash
          set -e
          if [ "${DEBUG}" = "true" ]; then
            set -x
          fi
          declare -a flaky_args
          flaky_args=("${flaky_args[@]}" --urls "${JENKINS_URL}/job/HBase%20Nightly/job/${BRANCH_NAME}" --is-yetus True --max-builds 20)
          flaky_args=("${flaky_args[@]}" --urls "${JENKINS_URL}/job/HBase-Flaky-Tests/job/${BRANCH_NAME}" --is-yetus False --max-builds 50)
          docker build -t hbase-dev-support dev-support
          docker run --ulimit nproc=12500 -v "${WORKSPACE}":/hbase -u `id -u`:`id -g` --workdir=/hbase hbase-dev-support \
            python dev-support/flaky-tests/report-flakies.py --mvn -v -o output "${flaky_args[@]}"
        '''
        sshPublisher(publishers: [
          sshPublisherDesc(configName: 'Nightlies',
            transfers: [
              sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
                sourceFiles: "output/dashboard.html"
              )
            ]
          )
        ])
        sh '''
          if [ -f "output/dashboard.html" ]; then
            ./dev-support/gen_redirect_html.py "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/output/dashboard.html" > output/dashboard.html
          fi
        '''
      }
    }
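    // The second sh step above intentionally overwrites the local dashboard.html with a
    // redirect page pointing at the copy just uploaded to nightlies.apache.org, so the
    // artifact archived below is only a small pointer.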
  }
  post {
    always {
      // Has to be relative to WORKSPACE.
      archiveArtifacts artifacts: "output/*"
    }
  }
}
@@ -1,94 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
pipeline {
  agent {
    node {
      label 'hbase'
    }
  }
  triggers {
    cron('H H/4 * * *') // Every four hours. See https://jenkins.io/doc/book/pipeline/syntax/#cron-syntax
  }
  options {
    // this should roughly match how long we tell the flaky dashboard to look at
    buildDiscarder(logRotator(numToKeepStr: '50'))
    timeout (time: 2, unit: 'HOURS')
    timestamps()
  }
  environment {
    ASF_NIGHTLIES = 'https://nightlies.apache.org'
  }
  parameters {
    booleanParam(name: 'DEBUG', defaultValue: false, description: 'Produce a lot more meta-information.')
  }
  tools {
    // this should match what the yetus nightly job for the branch will use
    maven 'maven_latest'
    jdk "jdk_1.8_latest"
  }
  stages {
    stage ('run flaky tests') {
      steps {
        sh '''#!/usr/bin/env bash
          set -e
          declare -a curl_args=(--fail)
          tmpdir=$(realpath target)
          declare -a mvn_args=(--batch-mode -fn -Dbuild.id="${BUILD_ID}" -Dmaven.repo.local="${WORKSPACE}/local-repository" -Djava.io.tmpdir=${tmpdir})
          if [ "${DEBUG}" = "true" ]; then
            curl_args=("${curl_args[@]}" -v)
            mvn_args=("${mvn_args[@]}" -X)
            set -x
          fi
          curl "${curl_args[@]}" -o includes.txt "${JENKINS_URL}/job/HBase-Find-Flaky-Tests/job/${BRANCH_NAME}/lastSuccessfulBuild/artifact/output/includes"
          if [ -s includes.txt ]; then
            rm -rf local-repository/org/apache/hbase
            mvn clean "${mvn_args[@]}"
            rm -rf "target/machine" && mkdir -p "target/machine"
            if [ -x dev-support/gather_machine_environment.sh ]; then
              "./dev-support/gather_machine_environment.sh" "target/machine"
              echo "got the following saved stats in 'target/machine'"
              ls -lh "target/machine"
            else
              echo "Skipped gathering machine environment because we couldn't read the script to do so."
            fi
            mvn package "${mvn_args[@]}" -Dtest="$(cat includes.txt)" -Dmaven.test.redirectTestOutputToFile=true -Dsurefire.firstPartForkCount=3 -Dsurefire.secondPartForkCount=3
          else
            echo "set of flaky tests is currently empty."
          fi
        '''
      }
    }
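    // Maven runs with -fn ("fail never") so a red test doesn't abort the pipeline; the post
    // block below still collects whatever surefire reports were produced. includes.txt,
    // fetched from the HBase-Find-Flaky-Tests job, becomes the -Dtest= selector for the rerun.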
  }
  post {
    always {
      junit testResults: "**/surefire-reports/*.xml", allowEmptyResults: true
      sshPublisher(publishers: [
        sshPublisherDesc(configName: 'Nightlies',
          transfers: [
            sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
              sourceFiles: "**/surefire-reports/*,**/test-data/*"
            )
          ]
        )
      ])
      sh '''#!/bin/bash -e
        ./dev-support/gen_redirect_html.py "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}" > test_logs.html
      '''
      archiveArtifacts artifacts: 'includes.txt,test_logs.html,target/machine/*'
    }
  }
}