Jenkins fails the whole build immediately if any stage fails. Hadoop2 tests run before Hadoop3 tests, so the Hadoop3 tests will run only if the Hadoop2 tests pass.
609 lines
18 KiB
Bash
Executable File
609 lines
18 KiB
Bash
Executable File
#!/usr/bin/env bash
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
# contributor license agreements. See the NOTICE file distributed with
|
|
# this work for additional information regarding copyright ownership.
|
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
# (the "License"); you may not use this file except in compliance with
|
|
# the License. You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
# See the License for the specific language governing permissions and
|
|
# limitations under the License.
|
|
|
|
# You'll need a local installation of
|
|
# [Apache Yetus' precommit checker](http://yetus.apache.org/documentation/0.1.0/#yetus-precommit)
|
|
# to use this personality.
|
|
#
|
|
# Download from: http://yetus.apache.org/downloads/ . You can either grab the source artifact and
|
|
# build from it, or use the convenience binaries provided on that download page.
|
|
#
|
|
# To run against, e.g. HBASE-15074 you'd then do
|
|
# ```bash
|
|
# test-patch --personality=dev-support/hbase-personality.sh HBASE-15074
|
|
# ```
|
|
#
|
|
# If you want to skip the ~1 hour it'll take to do all the hadoop API checks, use
|
|
# ```bash
|
|
# test-patch --plugins=all,-hadoopcheck --personality=dev-support/hbase-personality.sh HBASE-15074
|
|
# ````
|
|
#
|
|
# pass the `--jenkins` flag if you want to allow test-patch to destructively alter local working
|
|
# directory / branch in order to have things match what the issue patch requests.
|
|
|
|
# Enable every Yetus plugin; individual checks can still be disabled on the
# command line via --plugins=all,-<name>.
personality_plugins "all"
|
|
|
|
# Provide a fallback yetus_info when the Yetus core has not defined one
# (e.g. if this personality is sourced outside of test-patch).
if ! declare -f "yetus_info" >/dev/null; then

  function yetus_info
  {
    # Timestamped info line on stderr, matching the Yetus log format.
    printf '[%s INFO]: %s\n' "$(date)" "$*" >&2
  }

fi
|
|
|
|
## @description  Globals specific to this personality
## @audience     private
## @stability    evolving
function personality_globals
{
  BUILDTOOL=maven
  # The SC2034 disables below are deliberate: these variables are read by the
  # Yetus test-patch core after this function runs, not by this file.
  #shellcheck disable=SC2034
  PROJECT_NAME=hbase
  #shellcheck disable=SC2034
  PATCH_BRANCH_DEFAULT=master
  #shellcheck disable=SC2034
  JIRA_ISSUE_RE='^HBASE-[0-9]+$'
  #shellcheck disable=SC2034
  GITHUB_REPO="apache/hbase"

  # TODO use PATCH_BRANCH to select jdk versions to use.

  # Override the maven options (keep any caller-provided MAVEN_OPTS).
  MAVEN_OPTS="${MAVEN_OPTS:-"-Xmx3100M"}"
}
|
|
|
|
## @description  Parse extra arguments required by personalities, if any.
##               Recognized flags set the corresponding global; anything
##               else is ignored.
## @audience     private
## @stability    evolving
function personality_parse_args
{
  declare arg

  for arg in "$@"; do
    case "${arg}" in
      --exclude-tests-url=*)
        EXCLUDE_TESTS_URL="${arg#*=}"
        ;;
      --include-tests-url=*)
        INCLUDE_TESTS_URL="${arg#*=}"
        ;;
      --hadoop-profile=*)
        HADOOP_PROFILE="${arg#*=}"
        ;;
    esac
  done
}
|
|
|
|
## @description  Queue up modules for this personality
## @audience     private
## @stability    evolving
## @param        repostatus
## @param        testtype
function personality_modules
{
  local repostatus=$1
  local testtype=$2
  local extra=""
  # NOTE(review): deliberately unquoted so the space-separated module list
  # from Yetus is re-split into array elements — do not "fix" the quoting.
  local MODULES=(${CHANGED_MODULES[@]})

  yetus_info "Personality: ${repostatus} ${testtype}"

  clear_personality_queue

  # Flags common to every maven invocation queued below.
  extra="-DHBasePatchProcess"

  if [[ -n "${HADOOP_PROFILE}" ]]; then
    extra="${extra} -Dhadoop.profile=${HADOOP_PROFILE}"
  fi

  # BUILDMODE value is 'full' when there is no patch to be tested, and we are running checks on
  # full source code instead. In this case, do full compiles, tests, etc instead of per
  # module.
  # Used in nightly runs.
  # If BUILDMODE is 'patch', for unit and compile testtypes, there is no need to run individual
  # modules if root is included. HBASE-18505
  if [[ "${BUILDMODE}" == "full" ]] || \
     [[ ( "${testtype}" == unit || "${testtype}" == compile ) && "${MODULES[*]}" =~ \. ]]; then
    MODULES=(.)
  fi

  # mvninstall always runs at the root so downstream checks see local artifacts.
  if [[ ${testtype} == mvninstall ]]; then
    # shellcheck disable=SC2086
    personality_enqueue_module . ${extra}
    return
  fi

  if [[ ${testtype} == findbugs ]]; then
    # Run findbugs on each module individually to diff pre-patch and post-patch results and
    # report new warnings for changed modules only.
    # For some reason, findbugs on root is not working, but running on individual modules is
    # working. For time being, let it run on original list of CHANGED_MODULES. HBASE-19491
    for module in "${CHANGED_MODULES[@]}"; do
      # skip findbugs on hbase-shell and hbase-it. hbase-it has nothing
      # in src/main/java where findbugs goes to look
      if [[ ${module} == hbase-shell ]]; then
        continue
      elif [[ ${module} == hbase-it ]]; then
        continue
      else
        # shellcheck disable=SC2086
        personality_enqueue_module ${module} ${extra}
      fi
    done
    return
  fi

  # If EXCLUDE_TESTS_URL/INCLUDE_TESTS_URL is set, fetches the url
  # and sets -Dtest.exclude.pattern/-Dtest to exclude/include the
  # tests respectively.
  if [[ ${testtype} == unit ]]; then
    local tests_arg=""
    get_include_exclude_tests_arg tests_arg
    extra="${extra} -PrunAllTests ${tests_arg}"

    # Inject the jenkins build-id for our surefire invocations
    # Used by zombie detection stuff, even though we're not including that yet.
    if [ -n "${BUILD_ID}" ]; then
      extra="${extra} -Dbuild.id=${BUILD_ID}"
    fi
  fi

  # Default path: queue every (possibly collapsed-to-root) changed module.
  for module in "${MODULES[@]}"; do
    # shellcheck disable=SC2086
    personality_enqueue_module ${module} ${extra}
  done
}
|
|
|
|
## @description  Uses relevant include/exclude env variable to fetch the list of included/excluded
#                tests and sets the given variable to the arguments to be passed to the maven command.
#                The exclude URL takes precedence; at most one of the two is applied.
#                On wget failure the error is logged and the variable is left untouched.
## @audience     private
## @stability    evolving
## @param        name of variable to set with maven arguments
function get_include_exclude_tests_arg
{
  # Name of the caller's variable; written indirectly via eval below.
  local __resultvar=$1
  yetus_info "EXCLUDE_TESTS_URL=${EXCLUDE_TESTS_URL}"
  yetus_info "INCLUDE_TESTS_URL=${INCLUDE_TESTS_URL}"
  if [[ -n "${EXCLUDE_TESTS_URL}" ]]; then
    if wget "${EXCLUDE_TESTS_URL}" -O "excludes"; then
      excludes=$(cat excludes)
      yetus_debug "excludes=${excludes}"
      if [[ -n "${excludes}" ]]; then
        eval "${__resultvar}='-Dtest.exclude.pattern=${excludes}'"
      fi
      rm excludes
    else
      # $? here still holds wget's exit status from the 'if' condition.
      yetus_error "Wget error $? in fetching excludes file from url" \
        "${EXCLUDE_TESTS_URL}. Ignoring and proceeding."
    fi
  elif [[ -n "$INCLUDE_TESTS_URL" ]]; then
    if wget "$INCLUDE_TESTS_URL" -O "includes"; then
      includes=$(cat includes)
      yetus_debug "includes=${includes}"
      if [[ -n "${includes}" ]]; then
        eval "${__resultvar}='-Dtest=${includes}'"
      fi
      rm includes
    else
      yetus_error "Wget error $? in fetching includes file from url" \
        "${INCLUDE_TESTS_URL}. Ignoring and proceeding."
    fi
  fi
}
|
|
|
|
###################################################
# Below here are our one-off tests specific to hbase.
# TODO break them into individual files so it's easier to maintain them?

# TODO line length check? could ignore all java files since checkstyle gets them.

###################################################

# Register the shaded-client build check with Yetus.
add_test_type shadedjars
|
|
|
|
|
|
# Hook called by Yetus when the shadedjars test type is activated.
function shadedjars_initialize
{
  yetus_debug "initializing shaded client checks."
  # Request that 'mvn install' run before this check so that locally-built
  # artifacts are available to the shaded-invariants module.
  maven_add_install shadedjars
}
|
|
|
|
## @description  only run the test if java changes.
## @audience     private
## @stability    evolving
## @param        filename
function shadedjars_filefilter
{
  local filename=$1

  # Trigger the shaded-client check for any Java source or Maven build change.
  # Fixed: the condition previously tested "$(unknown)", which ran a
  # nonexistent command and expanded to the empty string, so the check never
  # fired. Also escaped the dot so "pom.xml" is matched literally.
  if [[ ${filename} =~ \.java$ ]] || [[ ${filename} =~ pom\.xml$ ]]; then
    add_test shadedjars
  fi
}
|
|
|
|
## @description  test the shaded client artifacts
## @audience     private
## @stability    evolving
## @param        repostatus
function shadedjars_rebuild
{
  local repostatus=$1
  local logfile="${PATCH_DIR}/${repostatus}-shadedjars.txt"

  if ! verify_needed_test shadedjars; then
    return 0
  fi

  big_console_header "Checking shaded client builds on ${repostatus}"

  # Build only the invariants module (plus its dependencies via -am); skip
  # javadoc/checkstyle/findbugs since other test types cover those.
  echo_and_redirect "${logfile}" \
    "${MAVEN}" "${MAVEN_ARGS[@]}" clean verify -fae --batch-mode \
      -pl hbase-shaded/hbase-shaded-check-invariants -am \
      -Dtest=NoUnitTests -DHBasePatchProcess -Prelease \
      -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true -Dfindbugs.skip=true

  # Vote based on the number of maven [ERROR] lines in the build log.
  # NOTE(review): 'count' is not declared local, so it leaks to the caller.
  count=$(${GREP} -c '\[ERROR\]' "${logfile}")
  if [[ ${count} -gt 0 ]]; then
    add_vote_table -1 shadedjars "${repostatus} has ${count} errors when building our shaded downstream artifacts."
    return 1
  fi

  add_vote_table +1 shadedjars "${repostatus} has no errors when building our shaded downstream artifacts."
  return 0
}
|
|
|
|
###################################################

# Register the cross-Hadoop-version compile check with Yetus.
add_test_type hadoopcheck
|
|
|
|
## @description  hadoopcheck file filter
## @audience     private
## @stability    evolving
## @param        filename
function hadoopcheck_filefilter
{
  local filename=$1

  # Java or Maven build changes can affect compilation against other Hadoop
  # versions. Fixed: previously tested "$(unknown)", which always expanded
  # empty so the check never fired; also escaped the dot in "pom\.xml".
  if [[ ${filename} =~ \.java$ ]] || [[ ${filename} =~ pom\.xml$ ]]; then
    add_test hadoopcheck
  fi
}
|
|
|
|
## @description  Parse args to detect if QUICK_HADOOPCHECK mode is enabled.
## @audience     private
## @stability    evolving
function hadoopcheck_parse_args
{
  declare arg

  # Scan every argument; only the quick-check flag is of interest here.
  for arg in "$@"; do
    if [[ "${arg}" == "--quick-hadoopcheck" ]]; then
      QUICK_HADOOPCHECK=true
    fi
  done
}
|
|
|
|
## @description  Adds QUICK_HADOOPCHECK env variable to DOCKER_EXTRAARGS.
## @audience     private
## @stability    evolving
function hadoopcheck_docker_support
{
  # Forward the quick-check setting into the Docker container environment.
  DOCKER_EXTRAARGS+=("--env=QUICK_HADOOPCHECK=${QUICK_HADOOPCHECK}")
}
|
|
|
|
## @description  hadoopcheck test
## @audience     private
## @stability    evolving
## @param        repostatus
function hadoopcheck_rebuild
{
  local repostatus=$1
  local hadoopver
  local logfile
  local count
  local result=0
  local hbase_hadoop2_versions
  local hbase_hadoop3_versions

  # Only check the patched tree; the pre-patch branch compile is covered by
  # other test types.
  if [[ "${repostatus}" = branch ]]; then
    return 0
  fi

  if ! verify_needed_test hadoopcheck; then
    return 0
  fi

  big_console_header "Compiling against various Hadoop versions"

  # All supported Hadoop versions that we want to test the compilation with
  # See the Hadoop section on prereqs in the HBase Reference Guide
  # NOTE(review): hbase_common_hadoop2_versions is not declared 'local' and so
  # leaks into the caller's scope — confirm before tightening.
  hbase_common_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4"
  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
    yetus_info "Setting Hadoop versions to test based on branch-1-ish rules."
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop2_versions="2.4.1 2.5.2 2.6.5 2.7.4"
    else
      hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 ${hbase_common_hadoop2_versions}"
    fi
    # branch-1 does not support Hadoop 3 at all.
    hbase_hadoop3_versions=""
  else # master or a feature branch
    yetus_info "Setting Hadoop versions to test based on branch-2/master/feature branch rules."
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop2_versions="2.6.5 2.7.4"
    else
      hbase_hadoop2_versions="${hbase_common_hadoop2_versions}"
    fi
    hbase_hadoop3_versions="3.0.0"
  fi

  export MAVEN_OPTS="${MAVEN_OPTS}"
  # Hadoop 2 pass: one 'mvn clean install -DskipTests' per version; each
  # failure records a -1 vote and bumps 'result'.
  for hadoopver in ${hbase_hadoop2_versions}; do
    logfile="${PATCH_DIR}/patch-javac-${hadoopver}.txt"
    echo_and_redirect "${logfile}" \
      "${MAVEN}" clean install \
        -DskipTests -DHBasePatchProcess \
        -Dhadoop-two.version="${hadoopver}"
    count=$(${GREP} -c '\[ERROR\]' "${logfile}")
    if [[ ${count} -gt 0 ]]; then
      add_vote_table -1 hadoopcheck "${BUILDMODEMSG} causes ${count} errors with Hadoop v${hadoopver}."
      ((result=result+1))
    fi
  done

  # Hadoop 3 pass: empty version list (branch-1) simply skips this loop.
  for hadoopver in ${hbase_hadoop3_versions}; do
    logfile="${PATCH_DIR}/patch-javac-${hadoopver}.txt"
    echo_and_redirect "${logfile}" \
      "${MAVEN}" clean install \
        -DskipTests -DHBasePatchProcess \
        -Dhadoop-three.version="${hadoopver}" \
        -Dhadoop.profile=3.0
    count=$(${GREP} -c '\[ERROR\]' "${logfile}")
    if [[ ${count} -gt 0 ]]; then
      add_vote_table -1 hadoopcheck "${BUILDMODEMSG} causes ${count} errors with Hadoop v${hadoopver}."
      ((result=result+1))
    fi
  done

  if [[ ${result} -gt 0 ]]; then
    return 1
  fi

  # Success vote: mention the Hadoop 3 versions only when any were checked.
  if [[ -n "${hbase_hadoop3_versions}" ]]; then
    add_vote_table +1 hadoopcheck "Patch does not cause any errors with Hadoop ${hbase_hadoop2_versions} or ${hbase_hadoop3_versions}."
  else
    add_vote_table +1 hadoopcheck "Patch does not cause any errors with Hadoop ${hbase_hadoop2_versions}."
  fi
  return 0
}
|
|
|
|
######################################

# TODO if we need the protoc check, we probably need to check building all the modules that rely on hbase-protocol

# Register the protoc compile check with Yetus.
add_test_type hbaseprotoc
|
|
|
|
## @description  hbaseprotoc file filter
## @audience     private
## @stability    evolving
## @param        filename
function hbaseprotoc_filefilter
{
  local filename=$1

  # Only .proto changes need a protoc recompile. Fixed: previously tested
  # "$(unknown)", which always expanded empty, so the check never fired.
  if [[ ${filename} =~ \.proto$ ]]; then
    add_test hbaseprotoc
  fi
}
|
|
|
|
## @description  check hbase proto compilation
## @audience     private
## @stability    evolving
## @param        repostatus
function hbaseprotoc_rebuild
{
  declare repostatus=$1
  declare i=0
  declare fn
  declare module
  declare logfile
  declare count
  # NOTE(review): 'result' is never initialized; ((result=result+1)) treats
  # the unset value as 0, which works but is fragile.
  declare result

  if [[ "${repostatus}" = branch ]]; then
    return 0
  fi

  if ! verify_needed_test hbaseprotoc; then
    return 0
  fi

  big_console_header "HBase protoc plugin: ${BUILDMODE}"

  start_clock

  personality_modules patch hbaseprotoc
  # Need to run 'install' instead of 'compile' because shading plugin
  # is hooked-up to 'install'; else hbase-protocol-shaded is left with
  # half of its process done.
  modules_workers patch hbaseprotoc install -DskipTests -Pcompile-protobuf -X -DHBasePatchProcess

  # Walk the MODULE/MODULE_STATUS arrays populated by modules_workers,
  # counting modules that already failed plus modules whose logs contain
  # maven [ERROR] lines.
  # shellcheck disable=SC2153
  until [[ $i -eq "${#MODULE[@]}" ]]; do
    if [[ ${MODULE_STATUS[${i}]} == -1 ]]; then
      ((result=result+1))
      ((i=i+1))
      continue
    fi
    module=${MODULE[$i]}
    fn=$(module_file_fragment "${module}")
    logfile="${PATCH_DIR}/patch-hbaseprotoc-${fn}.txt"

    count=$(${GREP} -c '\[ERROR\]' "${logfile}")

    if [[ ${count} -gt 0 ]]; then
      module_status ${i} -1 "patch-hbaseprotoc-${fn}.txt" "Patch generated "\
        "${count} new protoc errors in ${module}."
      ((result=result+1))
    fi
    ((i=i+1))
  done

  modules_messages patch hbaseprotoc true
  if [[ ${result} -gt 0 ]]; then
    return 1
  fi
  return 0
}
|
|
|
|
######################################

# Register the anti-pattern patch scan with Yetus.
add_test_type hbaseanti
|
|
|
|
## @description  hbaseanti file filter
## @audience     private
## @stability    evolving
## @param        filename
function hbaseanti_filefilter
{
  local filename=$1

  # Anti-pattern scan applies to Java sources only. Fixed: previously tested
  # "$(unknown)", which always expanded empty, so the check never fired.
  if [[ ${filename} =~ \.java$ ]]; then
    add_test hbaseanti
  fi
}
|
|
|
|
## @description  hbaseanti patch file check
## @audience     private
## @stability    evolving
## @param        filename
function hbaseanti_patchfile
{
  local patchfile=$1
  local warnings
  # NOTE(review): 'result' is declared but never initialized; the arithmetic
  # below treats the unset value as 0, which works but is fragile.
  local result

  # There is no patch file to scan when running against the full source tree.
  if [[ "${BUILDMODE}" = full ]]; then
    return 0
  fi

  if ! verify_needed_test hbaseanti; then
    return 0
  fi

  big_console_header "Checking for known anti-patterns"

  start_clock

  # Anti-pattern: TreeMap over byte[] keys without Bytes.BYTES_COMPARATOR.
  warnings=$(${GREP} -c 'new TreeMap<byte.*()' "${patchfile}")
  if [[ ${warnings} -gt 0 ]]; then
    add_vote_table -1 hbaseanti "" "The patch appears to have anti-pattern where BYTES_COMPARATOR was omitted."
    ((result=result+1))
  fi

  # Anti-pattern: Hadoop's audience annotations instead of HBase's own.
  warnings=$(${GREP} -c 'import org.apache.hadoop.classification' "${patchfile}")
  if [[ ${warnings} -gt 0 ]]; then
    add_vote_table -1 hbaseanti "" "The patch appears use Hadoop classification instead of HBase."
    ((result=result+1))
  fi

  # Anti-pattern: legacy Jackson 1 (org.codehaus.jackson) usage.
  warnings=$(${GREP} -c 'import org.codehaus.jackson' "${patchfile}")
  if [[ ${warnings} -gt 0 ]]; then
    add_vote_table -1 hbaseanti "" "The patch appears use Jackson 1 classes/annotations."
    ((result=result+1))
  fi

  if [[ ${result} -gt 0 ]]; then
    return 1
  fi

  add_vote_table +1 hbaseanti "" "Patch does not have any anti-patterns."
  return 0
}
|
|
|
|
|
|
## @description  hbase custom mvnsite file filter. See HBASE-15042
## @audience     private
## @stability    evolving
## @param        filename
function mvnsite_filefilter
{
  local filename=$1

  if [[ ${BUILDTOOL} = maven ]]; then
    # Site docs live under src/site and src/main/asciidoc. Fixed: previously
    # tested "$(unknown)", which always expanded empty, so mvnsite was never
    # triggered (and the debug line logged nothing useful).
    if [[ ${filename} =~ src/site || ${filename} =~ src/main/asciidoc ]]; then
      yetus_debug "tests/mvnsite: ${filename}"
      add_test mvnsite
    fi
  fi
}
|
|
|
|
## This is named so that yetus will check us right after running tests.
|
|
## Essentially, we check for normal failures and then we look for zombies.
|
|
#function hbase_unit_logfilter
|
|
#{
|
|
# declare testtype="unit"
|
|
# declare input=$1
|
|
# declare output=$2
|
|
# declare processes
|
|
# declare process_output
|
|
# declare zombies
|
|
# declare zombie_count=0
|
|
# declare zombie_process
|
|
#
|
|
# yetus_debug "in hbase-specific unit logfilter."
|
|
#
|
|
# # pass-through to whatever is counting actual failures
|
|
# if declare -f ${BUILDTOOL}_${testtype}_logfilter >/dev/null; then
|
|
# "${BUILDTOOL}_${testtype}_logfilter" "${input}" "${output}"
|
|
# elif declare -f ${testtype}_logfilter >/dev/null; then
|
|
# "${testtype}_logfilter" "${input}" "${output}"
|
|
# fi
|
|
#
|
|
# start_clock
|
|
# if [ -n "${BUILD_ID}" ]; then
|
|
# yetus_debug "Checking for zombie test processes."
|
|
# processes=$(jps -v | "${GREP}" surefirebooter | "${GREP}" -e "hbase.build.id=${BUILD_ID}")
|
|
# if [ -n "${processes}" ] && [ "$(echo "${processes}" | wc -l)" -gt 0 ]; then
|
|
# yetus_warn "Found some suspicious process(es). Waiting a bit to see if they're just slow to stop."
|
|
# yetus_debug "${processes}"
|
|
# sleep 30
|
|
# #shellcheck disable=SC2016
|
|
# for pid in $(echo "${processes}"| ${AWK} '{print $1}'); do
|
|
# # Test our zombie still running (and that it still an hbase build item)
|
|
# process_output=$(ps -p "${pid}" | tail +2 | "${GREP}" -e "hbase.build.id=${BUILD_ID}")
|
|
# if [[ -n "${process_output}" ]]; then
|
|
# yetus_error "Zombie: ${process_output}"
|
|
# ((zombie_count = zombie_count + 1))
|
|
# zombie_process=$(jstack "${pid}" | "${GREP}" -e "\.Test" | "${GREP}" -e "\.java"| head -3)
|
|
# zombies="${zombies} ${zombie_process}"
|
|
# fi
|
|
# done
|
|
# fi
|
|
# if [ "${zombie_count}" -ne 0 ]; then
|
|
# add_vote_table -1 zombies "There are ${zombie_count} zombie test(s)"
|
|
# populate_test_table "zombie unit tests" "${zombies}"
|
|
# else
|
|
# yetus_info "Zombie check complete. All test runs exited normally."
|
|
# stop_clock
|
|
# fi
|
|
# else
|
|
# add_vote_table -0 zombies "There is no BUILD_ID env variable; can't check for zombies."
|
|
# fi
|
|
#
|
|
#}
|