#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# You'll need a local installation of
# [Apache Yetus' precommit checker](http://yetus.apache.org/documentation/0.1.0/#yetus-precommit)
# to use this personality.
#
# Download from: http://yetus.apache.org/downloads/ . You can either grab the source artifact and
# build from it, or use the convenience binaries provided on that download page.
#
# To run against, e.g., HBASE-15074 you'd then do
# ```bash
# test-patch --personality=dev-support/hbase-personality.sh HBASE-15074
# ```
#
# If you want to skip the ~1 hour it'll take to do all the Hadoop API checks, use
# ```bash
# test-patch --plugins=all,-hadoopcheck --personality=dev-support/hbase-personality.sh HBASE-15074
# ```
#
# Pass the `--jenkins` flag if you want to allow test-patch to destructively alter the local working
# directory / branch in order to have things match what the issue patch requests.
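#
# For example (an illustrative sketch; run it only in a disposable checkout, since
# `--jenkins` allows test-patch to reset the working directory and branch as described):
# ```bash
# test-patch --jenkins --personality=dev-support/hbase-personality.sh HBASE-15074
# ```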
personality_plugins "all"
## @description Globals specific to this personality
## @audience private
## @stability evolving
function personality_globals
{
  BUILDTOOL=maven
  #shellcheck disable=SC2034
  PROJECT_NAME=hbase
  #shellcheck disable=SC2034
  PATCH_BRANCH_DEFAULT=master
  #shellcheck disable=SC2034
  JIRA_ISSUE_RE='^HBASE-[0-9]+$'
  #shellcheck disable=SC2034
  GITHUB_REPO="apache/hbase"

  # All supported Hadoop versions that we want to test the compilation with.
  # See the Hadoop section on prereqs in the HBase Reference Guide.
  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
    HBASE_HADOOP2_VERSIONS="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
    HBASE_HADOOP3_VERSIONS=""
  elif [[ ${PATCH_BRANCH} = branch-2* ]]; then
    HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
    HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
  else # master or a feature branch
    HBASE_HADOOP2_VERSIONS="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3"
    HBASE_HADOOP3_VERSIONS="3.0.0-alpha4"
  fi

  # TODO use PATCH_BRANCH to select jdk versions to use.

  # Override the maven options
  MAVEN_OPTS="${MAVEN_OPTS:-"-Xmx3100M"}"
}
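
# Illustrative note on the MAVEN_OPTS default above: the ${VAR:-default} expansion
# keeps any value already set by the caller, so a run with more heap is simply, e.g.,
# ```bash
# MAVEN_OPTS="-Xmx4g" test-patch --personality=dev-support/hbase-personality.sh HBASE-15074
# ```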

## @description Queue up modules for this personality
## @audience private
## @stability evolving
## @param repostatus
## @param testtype
function personality_modules
{
  local repostatus=$1
  local testtype=$2
  local extra=""
  local status

  yetus_debug "Personality: ${repostatus} ${testtype}"

  clear_personality_queue

  extra="-DHBasePatchProcess"

  if [[ ${repostatus} == branch
     && ${testtype} == mvninstall ]] ||
     [[ "${BUILDMODE}" == full ]]; then
    personality_enqueue_module . ${extra}
    return
  fi

  if [[ ${testtype} = findbugs ]]; then
    for module in "${CHANGED_MODULES[@]}"; do
      # skip findbugs on hbase-shell and hbase-it. hbase-it has nothing
      # in src/main/java where findbugs goes to look
      if [[ ${module} == hbase-shell ]]; then
        continue
      elif [[ ${module} == hbase-it ]]; then
        continue
      else
        # shellcheck disable=SC2086
        personality_enqueue_module ${module} ${extra}
      fi
    done
    return
  fi

  # If EXCLUDE_TESTS_URL/INCLUDE_TESTS_URL is set, fetches the url
  # and sets -Dtest.exclude.pattern/-Dtest to exclude/include the
  # tests respectively.
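  # For illustration only (hypothetical values; whatever the fetched file contains
  # is passed straight through to surefire), an excludes file might hold a pattern
  # list such as
  #   **/TestShell.java,**/TestAcidGuarantees.java
  # and an includes file a plain test list such as
  #   TestLogRolling,TestScanner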
  if [[ ${testtype} = unit ]]; then
    extra="${extra} -PrunAllTests"
    if [[ -n "${EXCLUDE_TESTS_URL}" ]]; then
      wget "${EXCLUDE_TESTS_URL}" -O "excludes"
      status=$?
      if [[ ${status} -eq 0 ]]; then
        excludes=$(cat excludes)
        if [[ -n "${excludes}" ]]; then
          extra="${extra} -Dtest.exclude.pattern=${excludes}"
        fi
        rm excludes
      else
        echo "Wget error ${status} in fetching excludes file from url" \
             "${EXCLUDE_TESTS_URL}. Ignoring and proceeding."
      fi
    elif [[ -n "${INCLUDE_TESTS_URL}" ]]; then
      wget "${INCLUDE_TESTS_URL}" -O "includes"
      status=$?
      if [[ ${status} -eq 0 ]]; then
        includes=$(cat includes)
        if [[ -n "${includes}" ]]; then
          extra="${extra} -Dtest=${includes}"
        fi
        rm includes
      else
        echo "Wget error ${status} in fetching includes file from url" \
             "${INCLUDE_TESTS_URL}. Ignoring and proceeding."
      fi
    fi

    # Inject the jenkins build-id for our surefire invocations.
    # Used by zombie detection stuff, even though we're not including that yet.
    if [[ -n "${BUILD_ID}" ]]; then
      extra="${extra} -Dbuild.id=${BUILD_ID}"
    fi
  fi

  for module in "${CHANGED_MODULES[@]}"; do
    # shellcheck disable=SC2086
    personality_enqueue_module ${module} ${extra}
  done
}

###################################################
# Below here are our one-off tests specific to hbase.
# TODO break them into individual files so it's easier to maintain them?

# TODO line length check? could ignore all java files since checkstyle gets them.
###################################################

add_test_type hadoopcheck

## @description hadoopcheck file filter
## @audience private
## @stability evolving
## @param filename
function hadoopcheck_filefilter
{
  local filename=$1

  if [[ ${filename} =~ \.java$ ]]; then
    add_test hadoopcheck
  fi
}

## @description hadoopcheck test
## @audience private
## @stability evolving
## @param repostatus
function hadoopcheck_rebuild
{
  local repostatus=$1
  local hadoopver
  local logfile
  local count
  local result=0
  local hbase_hadoop2_versions
  local hbase_hadoop3_versions

  if [[ "${repostatus}" = branch ]]; then
    return 0
  fi

  big_console_header "Compiling against various Hadoop versions"

  hbase_hadoop2_versions=${HBASE_HADOOP2_VERSIONS}
  hbase_hadoop3_versions=${HBASE_HADOOP3_VERSIONS}

  export MAVEN_OPTS="${MAVEN_OPTS}"
  for hadoopver in ${hbase_hadoop2_versions}; do
    logfile="${PATCH_DIR}/patch-javac-${hadoopver}.txt"
    echo_and_redirect "${logfile}" \
      "${MAVEN}" clean install \
        -DskipTests -DHBasePatchProcess \
        -Dhadoop-two.version="${hadoopver}"
    count=$(${GREP} -c '\[ERROR\]' "${logfile}")
    if [[ ${count} -gt 0 ]]; then
      add_vote_table -1 hadoopcheck "${BUILDMODEMSG} causes ${count} errors with Hadoop v${hadoopver}."
      ((result=result+1))
    fi
  done

  for hadoopver in ${hbase_hadoop3_versions}; do
    logfile="${PATCH_DIR}/patch-javac-${hadoopver}.txt"
    echo_and_redirect "${logfile}" \
      "${MAVEN}" clean install \
        -DskipTests -DHBasePatchProcess \
        -Dhadoop-three.version="${hadoopver}" \
        -Dhadoop.profile=3.0
    count=$(${GREP} -c '\[ERROR\]' "${logfile}")
    if [[ ${count} -gt 0 ]]; then
      add_vote_table -1 hadoopcheck "${BUILDMODEMSG} causes ${count} errors with Hadoop v${hadoopver}."
      ((result=result+1))
    fi
  done

  if [[ ${result} -gt 0 ]]; then
    return 1
  fi

  if [[ -n "${hbase_hadoop3_versions}" ]]; then
    add_vote_table +1 hadoopcheck "Patch does not cause any errors with Hadoop ${hbase_hadoop2_versions} or ${hbase_hadoop3_versions}."
  else
    add_vote_table +1 hadoopcheck "Patch does not cause any errors with Hadoop ${hbase_hadoop2_versions}."
  fi
  return 0
}
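
# To reproduce one iteration of the hadoop2 loop above by hand (a sketch; 2.7.3 is
# just one entry from HBASE_HADOOP2_VERSIONS and `mvn` stands in for "${MAVEN}"):
# ```bash
# mvn clean install -DskipTests -DHBasePatchProcess -Dhadoop-two.version=2.7.3
# ```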

######################################

# TODO if we need the protoc check, we probably need to check building all the modules that rely on hbase-protocol
add_test_type hbaseprotoc

## @description hbaseprotoc file filter
## @audience private
## @stability evolving
## @param filename
function hbaseprotoc_filefilter
{
  local filename=$1

  if [[ ${filename} =~ \.proto$ ]]; then
    add_test hbaseprotoc
  fi
}

## @description hbaseprotoc test
## @audience private
## @stability evolving
## @param repostatus
function hbaseprotoc_rebuild
{
  declare repostatus=$1
  declare i=0
  declare fn
  declare module
  declare logfile
  declare count
  declare result=0

  if [[ "${repostatus}" = branch ]]; then
    return 0
  fi

  if ! verify_needed_test hbaseprotoc; then
    return 0
  fi

  big_console_header "HBase protoc plugin: ${BUILDMODE}"

  start_clock

  personality_modules patch hbaseprotoc
  # Need to run 'install' instead of 'compile' because the shading plugin
  # is hooked up to 'install'; otherwise hbase-protocol-shaded is left with
  # half of its process done.
  modules_workers patch hbaseprotoc install -DskipTests -Pcompile-protobuf -X -DHBasePatchProcess

  # shellcheck disable=SC2153
  until [[ $i -eq "${#MODULE[@]}" ]]; do
    if [[ ${MODULE_STATUS[${i}]} == -1 ]]; then
      ((result=result+1))
      ((i=i+1))
      continue
    fi
    module=${MODULE[$i]}
    fn=$(module_file_fragment "${module}")
    logfile="${PATCH_DIR}/patch-hbaseprotoc-${fn}.txt"

    count=$(${GREP} -c '\[ERROR\]' "${logfile}")

    if [[ ${count} -gt 0 ]]; then
      module_status ${i} -1 "patch-hbaseprotoc-${fn}.txt" "Patch generated" \
        "${count} new protoc errors in ${module}."
      ((result=result+1))
    fi
    ((i=i+1))
  done

  modules_messages patch hbaseprotoc true
  if [[ ${result} -gt 0 ]]; then
    return 1
  fi
  return 0
}
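
# The per-module build above amounts to the following when run by hand (a sketch;
# the -pl module selection is illustrative, modules_workers handles it in real runs):
# ```bash
# mvn install -DskipTests -Pcompile-protobuf -X -DHBasePatchProcess -pl hbase-protocol-shaded
# ```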

######################################

add_test_type hbaseanti

## @description hbaseanti file filter
## @audience private
## @stability evolving
## @param filename
function hbaseanti_filefilter
{
  local filename=$1

  if [[ ${filename} =~ \.java$ ]]; then
    add_test hbaseanti
  fi
}

## @description hbaseanti patch file check
## @audience private
## @stability evolving
## @param filename
function hbaseanti_patchfile
{
  local patchfile=$1
  local warnings
  local result=0

  if [[ "${BUILDMODE}" = full ]]; then
    return 0
  fi

  if ! verify_needed_test hbaseanti; then
    return 0
  fi

  big_console_header "Checking for known anti-patterns"

  start_clock

  warnings=$(${GREP} -c 'new TreeMap<byte.*()' "${patchfile}")
  if [[ ${warnings} -gt 0 ]]; then
    add_vote_table -1 hbaseanti "" "The patch appears to have an anti-pattern where BYTES_COMPARATOR was omitted: ${warnings}."
    ((result=result+1))
  fi

  warnings=$(${GREP} -c 'import org.apache.hadoop.classification' "${patchfile}")
  if [[ ${warnings} -gt 0 ]]; then
    add_vote_table -1 hbaseanti "" "The patch appears to use Hadoop classification instead of HBase: ${warnings}."
    ((result=result+1))
  fi

  if [[ ${result} -gt 0 ]]; then
    return 1
  fi

  add_vote_table +1 hbaseanti "" "Patch does not have any anti-patterns."
  return 0
}
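
# For context, the TreeMap grep above targets Java like the following hypothetical
# snippet: byte[] does not implement Comparable, so a TreeMap built without
# Bytes.BYTES_COMPARATOR fails at runtime on the first put.
#   Map<byte[], Cell> map = new TreeMap<byte[], Cell>();            // flagged
#   Map<byte[], Cell> map = new TreeMap<>(Bytes.BYTES_COMPARATOR);  // what we want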

## @description hbase custom mvnsite file filter. See HBASE-15042
## @audience private
## @stability evolving
## @param filename
function mvnsite_filefilter
{
  local filename=$1

  if [[ ${BUILDTOOL} = maven ]]; then
    if [[ ${filename} =~ src/main/site || ${filename} =~ src/main/asciidoc ]]; then
      yetus_debug "tests/mvnsite: ${filename}"
      add_test mvnsite
    fi
  fi
}

## This is named so that yetus will check us right after running tests.
## Essentially, we check for normal failures and then we look for zombies.
#function hbase_unit_logfilter
#{
#  declare testtype="unit"
#  declare input=$1
#  declare output=$2
#  declare processes
#  declare process_output
#  declare zombies
#  declare zombie_count=0
#  declare zombie_process
#
#  yetus_debug "in hbase-specific unit logfilter."
#
#  # pass-through to whatever is counting actual failures
#  if declare -f ${BUILDTOOL}_${testtype}_logfilter >/dev/null; then
#    "${BUILDTOOL}_${testtype}_logfilter" "${input}" "${output}"
#  elif declare -f ${testtype}_logfilter >/dev/null; then
#    "${testtype}_logfilter" "${input}" "${output}"
#  fi
#
#  start_clock
#  if [ -n "${BUILD_ID}" ]; then
#    yetus_debug "Checking for zombie test processes."
#    processes=$(jps -v | "${GREP}" surefirebooter | "${GREP}" -e "hbase.build.id=${BUILD_ID}")
#    if [ -n "${processes}" ] && [ "$(echo "${processes}" | wc -l)" -gt 0 ]; then
#      yetus_warn "Found some suspicious process(es). Waiting a bit to see if they're just slow to stop."
#      yetus_debug "${processes}"
#      sleep 30
#      #shellcheck disable=SC2016
#      for pid in $(echo "${processes}"| ${AWK} '{print $1}'); do
#        # Test that our zombie is still running (and that it is still an hbase build item)
#        process_output=$(ps -p "${pid}" | tail +2 | "${GREP}" -e "hbase.build.id=${BUILD_ID}")
#        if [[ -n "${process_output}" ]]; then
#          yetus_error "Zombie: ${process_output}"
#          ((zombie_count = zombie_count + 1))
#          zombie_process=$(jstack "${pid}" | "${GREP}" -e "\.Test" | "${GREP}" -e "\.java"| head -3)
#          zombies="${zombies} ${zombie_process}"
#        fi
#      done
#    fi
#    if [ "${zombie_count}" -ne 0 ]; then
#      add_vote_table -1 zombies "There are ${zombie_count} zombie test(s)"
#      populate_test_table "zombie unit tests" "${zombies}"
#    else
#      yetus_info "Zombie check complete. All test runs exited normally."
#      stop_clock
#    fi
#  else
#    add_vote_table -0 zombies "There is no BUILD_ID env variable; can't check for zombies."
#  fi
#}