// Licensed to the Apache Software Foundation (ASF) under one
|
|
// or more contributor license agreements. See the NOTICE file
|
|
// distributed with this work for additional information
|
|
// regarding copyright ownership. The ASF licenses this file
|
|
// to you under the Apache License, Version 2.0 (the
|
|
// "License"); you may not use this file except in compliance
|
|
// with the License. You may obtain a copy of the License at
|
|
//
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
//
|
|
// Unless required by applicable law or agreed to in writing,
|
|
// software distributed under the License is distributed on an
|
|
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
|
// KIND, either express or implied. See the License for the
|
|
// specific language governing permissions and limitations
|
|
// under the License.
|
|
|
|
pipeline {
|
|
|
|
agent {
  label 'Hadoop'
}

options {
  // N.B. this is per-branch, which means per PR
  disableConcurrentBuilds()
  buildDiscarder(logRotator(numToKeepStr: '15'))
  timeout (time: 10, unit: 'HOURS')
  timestamps()
  // checkout is done explicitly per parallel stage, into its own subdirectory
  skipDefaultCheckout()
}

environment {
  // relative path names used to compose the per-stage absolute paths below
  SRC_REL = 'src'
  PATCH_REL = 'output'
  YETUS_REL = 'yetus'
  DOCKERFILE_REL = "${SRC_REL}/dev-support/docker/Dockerfile"
  YETUS_DRIVER_REL = "${SRC_REL}/dev-support/jenkins_precommit_github_yetus.sh"
  // Branch or tag name. Yetus release tags are 'rel/X.Y.Z'
  YETUS_VERSION = 'rel/0.12.0'
  GENERAL_CHECK_PLUGINS = 'all,-javadoc,-jira,-shadedjars,-unit'
  JDK_SPECIFIC_PLUGINS = 'compile,github,htmlout,javac,javadoc,maven,mvninstall,shadedjars,unit'
  // output from surefire; sadly the archive function in yetus only works on file names.
  ARCHIVE_PATTERN_LIST = 'TEST-*.xml,org.apache.h*.txt,*.dumpstream,*.dump'
  // These tests currently have known failures. Once they burn down to 0, remove from here so that new problems will cause a failure.
  TESTS_FILTER = 'cc,checkstyle,javac,javadoc,pylint,shellcheck,whitespace,perlcritic,ruby-lint,rubocop,mvnsite'
  EXCLUDE_TESTS_URL = "${JENKINS_URL}/job/HBase/job/HBase-Find-Flaky-Tests/job/${CHANGE_TARGET}/lastSuccessfulBuild/artifact/output/excludes"

  // a global view of paths. parallel stages can land on the same host concurrently, so each
  // stage works in its own subdirectory. there is an "output" under each of these
  // directories, which we retrieve after the build is complete.
  WORKDIR_REL_GENERAL_CHECK = 'yetus-general-check'
  WORKDIR_REL_JDK8_HADOOP2_CHECK = 'yetus-jdk8-hadoop2-check'
  WORKDIR_REL_JDK11_HADOOP3_CHECK = 'yetus-jdk11-hadoop3-check'
  ASF_NIGHTLIES = 'https://nightlies.apache.org'
}

parameters {
  booleanParam(name: 'DEBUG',
    defaultValue: false,
    description: 'Print extra outputs for debugging the jenkins job and yetus')
}
|
|
|
|
stages {
|
|
stage ('precommit checks') {
|
|
parallel {
|
|
stage ('yetus general check') {
  agent {
    node {
      label 'Hadoop'
    }
  }
  environment {
    // customized per parallel stage
    PLUGINS = "${GENERAL_CHECK_PLUGINS}"
    SET_JAVA_HOME = '/usr/lib/jvm/java-8'
    WORKDIR_REL = "${WORKDIR_REL_GENERAL_CHECK}"
    // identical for all parallel stages
    WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
    YETUSDIR = "${WORKDIR}/${YETUS_REL}"
    SOURCEDIR = "${WORKDIR}/${SRC_REL}"
    PATCHDIR = "${WORKDIR}/${PATCH_REL}"
    BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
    DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
    YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
  }
  steps {
    // check out the PR source and the pinned Yetus release side-by-side
    // under this stage's private work directory
    dir("${SOURCEDIR}") {
      checkout scm
    }
    dir("${YETUSDIR}") {
      checkout([
        $class           : 'GitSCM',
        branches         : [[name: "${YETUS_VERSION}"]],
        userRemoteConfigs: [[url: 'https://github.com/apache/yetus.git']]]
      )
    }
    dir("${WORKDIR}") {
      withCredentials([
        usernamePassword(
          credentialsId: 'apache-hbase-at-github.com',
          passwordVariable: 'GITHUB_PASSWORD',
          usernameVariable: 'GITHUB_USER'
        )]) {
        script {
          def ret = sh(
            label: 'test-patch',
            returnStatus: true,
            script: '''#!/bin/bash -e
              hostname -a ; pwd ; ls -la
              printenv 2>&1 | sort
              echo "[INFO] Launching Yetus via ${YETUS_DRIVER}"
              "${YETUS_DRIVER}"
            '''
          )
          if (ret != 0) {
            // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
            // test output. See HBASE-26339 for more details.
            currentBuild.result = 'UNSTABLE'
          }
        }
      }
    }
  }
  post {
    always {
      // Has to be relative to WORKSPACE.
      archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit"
      archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/**/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit/**/*"
      publishHTML target: [
        allowMissing: true,
        keepAll: true,
        alwaysLinkToLastBuild: true,
        // Has to be relative to WORKSPACE
        reportDir: "${WORKDIR_REL}/${PATCH_REL}",
        reportFiles: 'report.html',
        reportName: 'PR General Check Report'
      ]
    }
    // Jenkins pipeline jobs fill slaves on PRs without this :(
    cleanup() {
      script {
        sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
          # See YETUS-764
          if [ -f "${PATCHDIR}/pidfile.txt" ]; then
            echo "test-patch process appears to still be running: killing"
            kill `cat "${PATCHDIR}/pidfile.txt"` || true
            sleep 10
          fi
          if [ -f "${PATCHDIR}/cidfile.txt" ]; then
            echo "test-patch container appears to still be running: killing"
            docker kill `cat "${PATCHDIR}/cidfile.txt"` || true
          fi
          # See HADOOP-13951
          chmod -R u+rxw "${WORKSPACE}"
        '''
        dir ("${WORKDIR}") {
          deleteDir()
        }
      }
    }
  }
}
|
|
stage ('yetus jdk8 Hadoop2 checks') {
  agent {
    node {
      label 'Hadoop'
    }
  }
  environment {
    // customized per parallel stage
    PLUGINS = "${JDK_SPECIFIC_PLUGINS}"
    SET_JAVA_HOME = '/usr/lib/jvm/java-8'
    WORKDIR_REL = "${WORKDIR_REL_JDK8_HADOOP2_CHECK}"
    // identical for all parallel stages
    WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
    YETUSDIR = "${WORKDIR}/${YETUS_REL}"
    SOURCEDIR = "${WORKDIR}/${SRC_REL}"
    PATCHDIR = "${WORKDIR}/${PATCH_REL}"
    BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
    DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
    YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
    SKIP_ERRORPRONE = true
  }
  steps {
    // check out the PR source and the pinned Yetus release side-by-side
    // under this stage's private work directory
    dir("${SOURCEDIR}") {
      checkout scm
    }
    dir("${YETUSDIR}") {
      checkout([
        $class           : 'GitSCM',
        branches         : [[name: "${YETUS_VERSION}"]],
        userRemoteConfigs: [[url: 'https://github.com/apache/yetus.git']]]
      )
    }
    dir("${WORKDIR}") {
      withCredentials([
        usernamePassword(
          credentialsId: 'apache-hbase-at-github.com',
          passwordVariable: 'GITHUB_PASSWORD',
          usernameVariable: 'GITHUB_USER'
        )]) {
        script {
          def ret = sh(
            label: 'test-patch',
            returnStatus: true,
            script: '''#!/bin/bash -e
              hostname -a ; pwd ; ls -la
              printenv 2>&1 | sort
              echo "[INFO] Launching Yetus via ${YETUS_DRIVER}"
              "${YETUS_DRIVER}"
            '''
          )
          if (ret != 0) {
            // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
            // test output. See HBASE-26339 for more details.
            currentBuild.result = 'UNSTABLE'
          }
        }
      }
    }
  }
  post {
    always {
      junit testResults: "${WORKDIR_REL}/${SRC_REL}/**/target/**/TEST-*.xml", allowEmptyResults: true
      sh label: 'zip surefire reports', script: '''#!/bin/bash -e
        if [ -d "${PATCHDIR}/archiver" ]; then
          count=$(find "${PATCHDIR}/archiver" -type f | wc -l)
          if [[ 0 -ne ${count} ]]; then
            echo "zipping ${count} archived files"
            zip -q -m -r "${PATCHDIR}/test_logs.zip" "${PATCHDIR}/archiver"
          else
            echo "No archived files, skipping compressing."
          fi
        else
          echo "No archiver directory, skipping compressing."
        fi
      '''
      // ship the (potentially large) surefire logs off to the nightlies host
      // rather than keeping them as Jenkins artifacts
      sshPublisher(publishers: [
        sshPublisherDesc(configName: 'Nightlies',
          transfers: [
            sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
              sourceFiles: "${env.WORKDIR_REL}/${env.PATCH_REL}/test_logs.zip"
            )
          ]
        )
      ])
      // remove the big test logs zip file, store the nightlies url in test_logs.txt
      sh '''#!/bin/bash -e
        if [ -f "${PATCHDIR}/test_logs.zip" ]; then
          echo "Remove ${PATCHDIR}/test_logs.zip for saving space"
          rm -rf "${PATCHDIR}/test_logs.zip"
          python ${SOURCEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${WORKDIR_REL}/${PATCH_REL}" > "${PATCHDIR}/test_logs.html"
        else
          echo "No test_logs.zip, skipping"
        fi
      '''
      // Has to be relative to WORKSPACE.
      archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit"
      archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/**/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit/**/*"
      publishHTML target: [
        allowMissing: true,
        keepAll: true,
        alwaysLinkToLastBuild: true,
        // Has to be relative to WORKSPACE
        reportDir: "${WORKDIR_REL}/${PATCH_REL}",
        reportFiles: 'report.html',
        // was 'PR JDK8 Hadoop3 Check Report' — copy-paste typo; this is the Hadoop2 stage
        reportName: 'PR JDK8 Hadoop2 Check Report'
      ]
    }
    // Jenkins pipeline jobs fill slaves on PRs without this :(
    cleanup() {
      script {
        sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
          # See YETUS-764
          if [ -f "${PATCHDIR}/pidfile.txt" ]; then
            echo "test-patch process appears to still be running: killing"
            kill `cat "${PATCHDIR}/pidfile.txt"` || true
            sleep 10
          fi
          if [ -f "${PATCHDIR}/cidfile.txt" ]; then
            echo "test-patch container appears to still be running: killing"
            docker kill `cat "${PATCHDIR}/cidfile.txt"` || true
          fi
          # See HADOOP-13951
          chmod -R u+rxw "${WORKSPACE}"
        '''
        dir ("${WORKDIR}") {
          deleteDir()
        }
      }
    }
  }
}
|
|
stage ('yetus jdk11 hadoop3 checks') {
  agent {
    node {
      label 'Hadoop'
    }
  }
  environment {
    // customized per parallel stage
    PLUGINS = "${JDK_SPECIFIC_PLUGINS}"
    SET_JAVA_HOME = '/usr/lib/jvm/java-11'
    HADOOP_PROFILE = '3.0'
    WORKDIR_REL = "${WORKDIR_REL_JDK11_HADOOP3_CHECK}"
    // identical for all parallel stages
    WORKDIR = "${WORKSPACE}/${WORKDIR_REL}"
    YETUSDIR = "${WORKDIR}/${YETUS_REL}"
    SOURCEDIR = "${WORKDIR}/${SRC_REL}"
    PATCHDIR = "${WORKDIR}/${PATCH_REL}"
    BUILD_URL_ARTIFACTS = "artifact/${WORKDIR_REL}/${PATCH_REL}"
    DOCKERFILE = "${WORKDIR}/${DOCKERFILE_REL}"
    YETUS_DRIVER = "${WORKDIR}/${YETUS_DRIVER_REL}"
    SKIP_ERRORPRONE = true
  }
  steps {
    // check out the PR source and the pinned Yetus release side-by-side
    // under this stage's private work directory
    dir("${SOURCEDIR}") {
      checkout scm
    }
    dir("${YETUSDIR}") {
      checkout([
        $class           : 'GitSCM',
        branches         : [[name: "${YETUS_VERSION}"]],
        userRemoteConfigs: [[url: 'https://github.com/apache/yetus.git']]]
      )
    }
    dir("${WORKDIR}") {
      withCredentials([
        usernamePassword(
          credentialsId: 'apache-hbase-at-github.com',
          passwordVariable: 'GITHUB_PASSWORD',
          usernameVariable: 'GITHUB_USER'
        )]) {
        script {
          def ret = sh(
            label: 'test-patch',
            returnStatus: true,
            script: '''#!/bin/bash -e
              hostname -a ; pwd ; ls -la
              printenv 2>&1 | sort
              echo "[INFO] Launching Yetus via ${YETUS_DRIVER}"
              "${YETUS_DRIVER}"
            '''
          )
          if (ret != 0) {
            // mark the build as UNSTABLE instead of FAILURE, to avoid skipping the later publish of
            // test output. See HBASE-26339 for more details.
            currentBuild.result = 'UNSTABLE'
          }
        }
      }
    }
  }
  post {
    always {
      junit testResults: "${WORKDIR_REL}/${SRC_REL}/**/target/**/TEST-*.xml", allowEmptyResults: true
      sh label: 'zip surefire reports', script: '''#!/bin/bash -e
        if [ -d "${PATCHDIR}/archiver" ]; then
          count=$(find "${PATCHDIR}/archiver" -type f | wc -l)
          if [[ 0 -ne ${count} ]]; then
            echo "zipping ${count} archived files"
            zip -q -m -r "${PATCHDIR}/test_logs.zip" "${PATCHDIR}/archiver"
          else
            echo "No archived files, skipping compressing."
          fi
        else
          echo "No archiver directory, skipping compressing."
        fi
      '''
      // ship the (potentially large) surefire logs off to the nightlies host
      // rather than keeping them as Jenkins artifacts
      sshPublisher(publishers: [
        sshPublisherDesc(configName: 'Nightlies',
          transfers: [
            sshTransfer(remoteDirectory: "hbase/${JOB_NAME}/${BUILD_NUMBER}",
              sourceFiles: "${env.WORKDIR_REL}/${env.PATCH_REL}/test_logs.zip"
            )
          ]
        )
      ])
      // remove the big test logs zip file, store the nightlies url in test_logs.txt
      sh '''#!/bin/bash -e
        if [ -f "${PATCHDIR}/test_logs.zip" ]; then
          echo "Remove ${PATCHDIR}/test_logs.zip for saving space"
          rm -rf "${PATCHDIR}/test_logs.zip"
          python ${SOURCEDIR}/dev-support/gen_redirect_html.py "${ASF_NIGHTLIES}/hbase/${JOB_NAME}/${BUILD_NUMBER}/${WORKDIR_REL}/${PATCH_REL}" > "${PATCHDIR}/test_logs.html"
        else
          echo "No test_logs.zip, skipping"
        fi
      '''
      // Has to be relative to WORKSPACE.
      archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit"
      archiveArtifacts artifacts: "${WORKDIR_REL}/${PATCH_REL}/**/*", excludes: "${WORKDIR_REL}/${PATCH_REL}/precommit/**/*"
      publishHTML target: [
        allowMissing: true,
        keepAll: true,
        alwaysLinkToLastBuild: true,
        // Has to be relative to WORKSPACE
        reportDir: "${WORKDIR_REL}/${PATCH_REL}",
        reportFiles: 'report.html',
        reportName: 'PR JDK11 Hadoop3 Check Report'
      ]
    }
    // Jenkins pipeline jobs fill slaves on PRs without this :(
    cleanup() {
      script {
        sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
          # See YETUS-764
          if [ -f "${PATCHDIR}/pidfile.txt" ]; then
            echo "test-patch process appears to still be running: killing"
            kill `cat "${PATCHDIR}/pidfile.txt"` || true
            sleep 10
          fi
          if [ -f "${PATCHDIR}/cidfile.txt" ]; then
            echo "test-patch container appears to still be running: killing"
            docker kill `cat "${PATCHDIR}/cidfile.txt"` || true
          fi
          # See HADOOP-13951
          chmod -R u+rxw "${WORKSPACE}"
        '''
        dir ("${WORKDIR}") {
          deleteDir()
        }
      }
    }
  }
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
post {
  // Jenkins pipeline jobs fill slaves on PRs without this :(
  cleanup() {
    script {
      sh label: 'Cleanup workspace', script: '''#!/bin/bash -e
        # See HADOOP-13951
        chmod -R u+rxw "${WORKSPACE}"
      '''
      deleteDir()
    }
  }
}
|
|
}
|