#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# You'll need a local installation of
# [Apache Yetus' precommit checker](http://yetus.apache.org/documentation/0.1.0/#yetus-precommit)
# to use this personality.
#
# Download from: http://yetus.apache.org/downloads/ . You can either grab the source artifact and
# build from it, or use the convenience binaries provided on that download page.
#
# To run against, e.g. HBASE-15074 you'd then do
# ```bash
# test-patch --personality=dev-support/hbase-personality.sh HBASE-15074
# ```
#
# If you want to skip the ~1 hour it'll take to do all the hadoop API checks, use
# ```bash
# test-patch --plugins=all,-hadoopcheck --personality=dev-support/hbase-personality.sh HBASE-15074
# ```
#
# pass the `--sentinel` flag if you want to allow test-patch to destructively alter local working
# directory / branch in order to have things match what the issue patch requests.
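#
# An illustrative example; substitute your own issue id:
# ```bash
# test-patch --sentinel --personality=dev-support/hbase-personality.sh HBASE-15074
# ```
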
personality_plugins "all"

if ! declare -f "yetus_info" >/dev/null; then
  function yetus_info
  {
    echo "[$(date) INFO]: $*" 1>&2
  }
fi

# work around yetus overwriting JAVA_HOME from our docker image
function docker_do_env_adds
{
  declare k

  for k in "${DOCKER_EXTRAENVS[@]}"; do
    if [[ "JAVA_HOME" == "${k}" ]]; then
      if [ -n "${JAVA_HOME}" ]; then
        DOCKER_EXTRAARGS+=("--env=JAVA_HOME=${JAVA_HOME}")
      fi
    else
      DOCKER_EXTRAARGS+=("--env=${k}=${!k}")
    fi
  done
}
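
# For example (values illustrative only): with DOCKER_EXTRAENVS=(JAVA_HOME BRANCH) and
# JAVA_HOME=/usr/lib/jvm/java-8, the loop above appends
# "--env=JAVA_HOME=/usr/lib/jvm/java-8" and "--env=BRANCH=<value of BRANCH>" to
# DOCKER_EXTRAARGS.
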
## @description Globals specific to this personality
## @audience private
## @stability evolving
function personality_globals
{
  BUILDTOOL=maven
  #shellcheck disable=SC2034
  PROJECT_NAME=hbase
  #shellcheck disable=SC2034
  PATCH_BRANCH_DEFAULT=master
  #shellcheck disable=SC2034
  JIRA_ISSUE_RE='^HBASE-[0-9]+$'
  #shellcheck disable=SC2034
  GITHUB_REPO="apache/hbase"
  # TODO use PATCH_BRANCH to select jdk versions to use.

  # Yetus 0.7.0 enforces limits. Default proclimit is 1000.
  # Up it. See HBASE-19902 for how we arrived at this number.
  #shellcheck disable=SC2034
  PROC_LIMIT=12500

  # Set docker container to run with 20g. Default is 4g in yetus.
  # See HBASE-19902 for how we arrived at 20g.
  #shellcheck disable=SC2034
  DOCKERMEMLIMIT=20g
}

## @description Parse extra arguments required by personalities, if any.
## @audience private
## @stability evolving
function personality_parse_args
{
  declare i

  for i in "$@"; do
    case ${i} in
      --exclude-tests-url=*)
        delete_parameter "${i}"
        EXCLUDE_TESTS_URL=${i#*=}
      ;;
      --include-tests-url=*)
        delete_parameter "${i}"
        INCLUDE_TESTS_URL=${i#*=}
      ;;
      --hadoop-profile=*)
        delete_parameter "${i}"
        HADOOP_PROFILE=${i#*=}
      ;;
      --skip-errorprone)
        delete_parameter "${i}"
        SKIP_ERRORPRONE=true
      ;;
      --asf-nightlies-general-check-base=*)
        delete_parameter "${i}"
        ASF_NIGHTLIES_GENERAL_CHECK_BASE=${i#*=}
      ;;
      --build-thread=*)
        delete_parameter "${i}"
        BUILD_THREAD=${i#*=}
      ;;
      --surefire-first-part-fork-count=*)
        delete_parameter "${i}"
        SUREFIRE_FIRST_PART_FORK_COUNT=${i#*=}
      ;;
      --surefire-second-part-fork-count=*)
        delete_parameter "${i}"
        SUREFIRE_SECOND_PART_FORK_COUNT=${i#*=}
      ;;
    esac
  done
}
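
# An illustrative invocation exercising the flags parsed above (issue id and values
# are examples only):
# ```bash
# test-patch --personality=dev-support/hbase-personality.sh \
#   --hadoop-profile=3.0 --skip-errorprone --build-thread=4 HBASE-15074
# ```
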
## @description Queue up modules for this personality
## @audience private
## @stability evolving
## @param repostatus
## @param testtype
function personality_modules
{
  local repostatus=$1
  local testtype=$2
  local extra=""
  local branch1jdk8=()
  local jdk8module=""
  local MODULES=("${CHANGED_MODULES[@]}")

  yetus_info "Personality: ${repostatus} ${testtype}"

  clear_personality_queue

  # At a few points, hbase modules can run build, test, etc. in parallel
  # Let it happen. Means we'll use more CPU but should be for short bursts.
  # https://cwiki.apache.org/confluence/display/MAVEN/Parallel+builds+in+Maven+3
  if [[ -n "${BUILD_THREAD}" ]]; then
    extra="--threads=${BUILD_THREAD}"
  else
    extra="--threads=2"
  fi
  extra="${extra} -DHBasePatchProcess"
  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
    extra="${extra} -Dhttps.protocols=TLSv1.2"
  fi

  # If we have HADOOP_PROFILE specified and we're on branch-2.x, pass along
  # the hadoop.profile system property. Ensures that Hadoop2 and Hadoop3
  # logic is not both activated within Maven.
  if [[ -n "${HADOOP_PROFILE}" ]] && [[ "${PATCH_BRANCH}" = branch-2* ]]; then
    extra="${extra} -Dhadoop.profile=${HADOOP_PROFILE}"
  fi

  # BUILDMODE value is 'full' when there is no patch to be tested, and we are running checks on
  # full source code instead. In this case, do full compiles, tests, etc instead of per
  # module.
  # Used in nightly runs.
  # If BUILDMODE is 'patch', for unit and compile testtypes, there is no need to run individual
  # modules if root is included. HBASE-18505
  if [[ "${BUILDMODE}" == "full" ]] || \
     { { [[ "${testtype}" == unit ]] || [[ "${testtype}" == compile ]] || [[ "${testtype}" == checkstyle ]]; } && \
       [[ "${MODULES[*]}" =~ \. ]]; }; then
    MODULES=(.)
  fi

  # If the checkstyle configs change, check everything.
  if [[ "${testtype}" == checkstyle ]] && [[ "${MODULES[*]}" =~ hbase-checkstyle ]]; then
    MODULES=(.)
  fi

  if [[ ${testtype} == mvninstall ]]; then
    # shellcheck disable=SC2086
    personality_enqueue_module . ${extra}
    return
  fi

  # This list should include any modules that require jdk8. Maven should be configured to only
  # include them when a proper JDK is in use, but that doesn't work if we specifically ask for the
  # module to build as yetus does if something changes in the module. Rather than try to
  # figure out what jdk is in use so we can duplicate the module activation logic, just
  # build at the top level if anything changes in one of these modules and let maven sort it out.
  branch1jdk8=(hbase-error-prone hbase-tinylfu-blockcache)
  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
    for jdk8module in "${branch1jdk8[@]}"; do
      if [[ "${MODULES[*]}" =~ ${jdk8module} ]]; then
        MODULES=(.)
        break
      fi
    done
  fi

  if [[ ${testtype} == spotbugs ]]; then
    # Run spotbugs on each module individually to diff pre-patch and post-patch results and
    # report new warnings for changed modules only.
    # For some reason, spotbugs on root is not working, but running on individual modules is
    # working. For time being, let it run on original list of CHANGED_MODULES. HBASE-19491
    for module in "${CHANGED_MODULES[@]}"; do
      # skip spotbugs on any module that lacks content in `src/main/java`
      if [[ "$(find "${BASEDIR}/${module}" -iname '*.java' -and -ipath '*/src/main/java/*' \
          -type f | wc -l | tr -d '[:space:]')" -eq 0 ]]; then
        yetus_debug "no java files found under ${module}/src/main/java. skipping."
        continue
      else
        # shellcheck disable=SC2086
        personality_enqueue_module ${module} ${extra}
      fi
    done
    return
  fi

  if [[ ${testtype} == compile ]] && [[ "${SKIP_ERRORPRONE}" != "true" ]] &&
      [[ "${PATCH_BRANCH}" != branch-1* ]]; then
    extra="${extra} -PerrorProne"
  fi

  # If EXCLUDE_TESTS_URL/INCLUDE_TESTS_URL is set, fetches the url
  # and sets -Dtest.exclude.pattern/-Dtest to exclude/include the
  # tests respectively.
  if [[ ${testtype} == unit ]]; then
    local tests_arg=""
    get_include_exclude_tests_arg tests_arg
    extra="${extra} -PrunAllTests ${tests_arg}"

    # Inject the jenkins build-id for our surefire invocations
    # Used by zombie detection stuff, even though we're not including that yet.
    if [ -n "${BUILD_ID}" ]; then
      extra="${extra} -Dbuild.id=${BUILD_ID}"
    fi

    # set forkCount
    if [[ -n "${SUREFIRE_FIRST_PART_FORK_COUNT}" ]]; then
      extra="${extra} -Dsurefire.firstPartForkCount=${SUREFIRE_FIRST_PART_FORK_COUNT}"
    fi
    if [[ -n "${SUREFIRE_SECOND_PART_FORK_COUNT}" ]]; then
      extra="${extra} -Dsurefire.secondPartForkCount=${SUREFIRE_SECOND_PART_FORK_COUNT}"
    fi

    # If the set of changed files includes CommonFSUtils then add the hbase-server
    # module to the set of modules (if not already included) to be tested
    for f in "${CHANGED_FILES[@]}"
    do
      if [[ "${f}" =~ CommonFSUtils ]]; then
        if [[ ! "${MODULES[*]}" =~ hbase-server ]] && [[ ! "${MODULES[*]}" =~ \. ]]; then
          MODULES+=("hbase-server")
        fi
        break
      fi
    done
  fi

  for module in "${MODULES[@]}"; do
    # shellcheck disable=SC2086
    personality_enqueue_module ${module} ${extra}
  done
}
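
# As a rough sketch of the net effect (module name and values illustrative): a unit-test
# run over a single changed module ends up queued as
#   personality_enqueue_module hbase-server --threads=2 -DHBasePatchProcess -PrunAllTests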

## @description places where we override the built in assumptions about what tests to run
## @audience private
## @stability evolving
## @param filename of changed file
function personality_file_tests
{
  local filename=$1
  yetus_debug "HBase specific personality_file_tests"

  # If the change is to the refguide, then we don't need any builtin yetus tests
  # the refguide test (below) will suffice for coverage.
  if [[ ${filename} =~ src/main/asciidoc ]] ||
     [[ ${filename} =~ src/main/xslt ]]; then
    yetus_debug "Skipping builtin yetus checks for ${filename}. refguide test should pick it up."
  else
    # If we change our asciidoc, rebuild mvnsite
    if [[ ${BUILDTOOL} = maven ]]; then
      if [[ ${filename} =~ src/site || ${filename} =~ src/main/asciidoc ]]; then
        yetus_debug "tests/mvnsite: ${filename}"
        add_test mvnsite
      fi
    fi
    # If we change checkstyle configs, run checkstyle
    if [[ ${filename} =~ checkstyle.*\.xml ]]; then
      yetus_debug "tests/checkstyle: ${filename}"
      add_test checkstyle
    fi
    # fallback to checking which tests based on what yetus would do by default
    if declare -f "${BUILDTOOL}_builtin_personality_file_tests" >/dev/null; then
      "${BUILDTOOL}_builtin_personality_file_tests" "${filename}"
    elif declare -f builtin_personality_file_tests >/dev/null; then
      builtin_personality_file_tests "${filename}"
    fi
  fi
}

## @description Uses relevant include/exclude env variable to fetch list of included/excluded
##              tests and sets given variable to arguments to be passed to maven command.
## @audience private
## @stability evolving
## @param name of variable to set with maven arguments
function get_include_exclude_tests_arg
{
  local __resultvar=$1
  yetus_info "EXCLUDE_TESTS_URL=${EXCLUDE_TESTS_URL}"
  yetus_info "INCLUDE_TESTS_URL=${INCLUDE_TESTS_URL}"
  if [[ -n "${EXCLUDE_TESTS_URL}" ]]; then
    if wget "${EXCLUDE_TESTS_URL}" -O "excludes"; then
      excludes=$(cat excludes)
      yetus_debug "excludes=${excludes}"
      if [[ -n "${excludes}" ]]; then
        eval "${__resultvar}='-Dtest.exclude.pattern=${excludes}'"
      fi
      rm excludes
    else
      yetus_error "Wget error $? in fetching excludes file from url" \
           "${EXCLUDE_TESTS_URL}. Ignoring and proceeding."
    fi
  elif [[ -n "$INCLUDE_TESTS_URL" ]]; then
    if wget "$INCLUDE_TESTS_URL" -O "includes"; then
      includes=$(cat includes)
      yetus_debug "includes=${includes}"
      if [[ -n "${includes}" ]]; then
        eval "${__resultvar}='-Dtest=${includes}'"
      fi
      rm includes
    else
      yetus_error "Wget error $? in fetching includes file from url" \
           "${INCLUDE_TESTS_URL}. Ignoring and proceeding."
    fi
  else
    # Use branch specific exclude list when EXCLUDE_TESTS_URL and INCLUDE_TESTS_URL are empty
    FLAKY_URL="https://ci-hadoop.apache.org/job/HBase/job/HBase-Find-Flaky-Tests/job/${PATCH_BRANCH}/lastSuccessfulBuild/artifact/output/excludes"
    if wget "${FLAKY_URL}" -O "excludes"; then
      excludes=$(cat excludes)
      yetus_debug "excludes=${excludes}"
      if [[ -n "${excludes}" ]]; then
        eval "${__resultvar}='-Dtest.exclude.pattern=${excludes}'"
      fi
      rm excludes
    else
      yetus_error "Wget error $? in fetching excludes file from url" \
           "${FLAKY_URL}. Ignoring and proceeding."
    fi
  fi
}
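
# Illustrative result (test name is made up): after
#   local tests_arg=""
#   get_include_exclude_tests_arg tests_arg
# tests_arg is either still empty or holds something like
# '-Dtest.exclude.pattern=**/TestExample.java', depending on which URL, if any, was fetched.
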
###################################################
# Below here are our one-off tests specific to hbase.
# TODO break them into individual files so it's easier to maintain them?
# TODO line length check? could ignore all java files since checkstyle gets them.
###################################################

add_test_type refguide

function refguide_initialize
{
  maven_add_install refguide
}

function refguide_filefilter
{
  local filename=$1

  if [[ ${filename} =~ src/main/asciidoc ]] ||
     [[ ${filename} =~ src/main/xslt ]] ||
     [[ ${filename} =~ hbase-common/src/main/resources/hbase-default\.xml ]]; then
    add_test refguide
  fi
}

function refguide_rebuild
{
  local repostatus=$1
  local logfile="${PATCH_DIR}/${repostatus}-refguide.log"
  declare -i count
  declare pdf_output

  if ! verify_needed_test refguide; then
    return 0
  fi

  big_console_header "Checking we can create the ref guide on ${repostatus}"

  start_clock

  # disabled because "maven_executor" needs to return both command and args
  # shellcheck disable=2046
  echo_and_redirect "${logfile}" \
    $(maven_executor) clean site --batch-mode \
      -pl . \
      -Dtest=NoUnitTests -DHBasePatchProcess -Prelease \
      -Dmaven.javadoc.skip=true -Dcheckstyle.skip=true -Dspotbugs.skip=true

  count=$(${GREP} -c '\[ERROR\]' "${logfile}")
  if [[ ${count} -gt 0 ]]; then
    add_vote_table -1 refguide "${repostatus} has ${count} errors when building the reference guide."
    add_footer_table refguide "@@BASE@@/${repostatus}-refguide.log"
    return 1
  fi

  if ! mv target/site "${PATCH_DIR}/${repostatus}-site"; then
    add_vote_table -1 refguide "${repostatus} failed to produce a site directory."
    add_footer_table refguide "@@BASE@@/${repostatus}-refguide.log"
    return 1
  fi

  if [[ ! -f "${PATCH_DIR}/${repostatus}-site/book.html" ]]; then
    add_vote_table -1 refguide "${repostatus} failed to produce the html version of the reference guide."
    add_footer_table refguide "@@BASE@@/${repostatus}-refguide.log"
    return 1
  fi

  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
    pdf_output="book.pdf"
  else
    pdf_output="apache_hbase_reference_guide.pdf"
  fi

  if [[ ! -f "${PATCH_DIR}/${repostatus}-site/${pdf_output}" ]]; then
    add_vote_table -1 refguide "${repostatus} failed to produce the pdf version of the reference guide."
    add_footer_table refguide "@@BASE@@/${repostatus}-refguide.log"
    return 1
  fi

  add_vote_table 0 refguide "${repostatus} has no errors when building the reference guide. See footer for rendered docs, which you should manually inspect."
  if [[ -n "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}" ]]; then
    add_footer_table refguide "${ASF_NIGHTLIES_GENERAL_CHECK_BASE}/${repostatus}-site/book.html"
  else
    add_footer_table refguide "@@BASE@@/${repostatus}-site/book.html"
  fi
  return 0
}

add_test_type shadedjars

function shadedjars_initialize
{
  yetus_debug "initializing shaded client checks."
  maven_add_install shadedjars
}

## @description only run the test if java changes.
## @audience private
## @stability evolving
## @param filename
function shadedjars_filefilter
{
  local filename=$1

  if [[ ${filename} =~ \.java$ ]] || [[ ${filename} =~ pom.xml$ ]]; then
    add_test shadedjars
  fi
}

## @description test the shaded client artifacts
## @audience private
## @stability evolving
## @param repostatus
function shadedjars_rebuild
{
  local repostatus=$1
  local logfile="${PATCH_DIR}/${repostatus}-shadedjars.txt"

  if ! verify_needed_test shadedjars; then
    return 0
  fi

  big_console_header "Checking shaded client builds on ${repostatus}"

  start_clock

  local -a maven_args=('clean' 'verify' '-fae' '--batch-mode'
    '-pl' 'hbase-shaded/hbase-shaded-check-invariants' '-am'
    '-Dtest=NoUnitTests' '-DHBasePatchProcess' '-Prelease'
    '-Dmaven.javadoc.skip=true' '-Dcheckstyle.skip=true' '-Dspotbugs.skip=true')

  # If we have HADOOP_PROFILE specified and we're on branch-2.x, pass along
  # the hadoop.profile system property. Ensures that Hadoop2 and Hadoop3
  # logic is not both activated within Maven.
  if [[ -n "${HADOOP_PROFILE}" ]] && [[ "${PATCH_BRANCH}" = branch-2* ]]; then
    maven_args+=("-Dhadoop.profile=${HADOOP_PROFILE}")
  fi

  # disabled because "maven_executor" needs to return both command and args
  # shellcheck disable=2046
  echo_and_redirect "${logfile}" $(maven_executor) "${maven_args[@]}"

  count=$(${GREP} -c '\[ERROR\]' "${logfile}")
  if [[ ${count} -gt 0 ]]; then
    add_vote_table -1 shadedjars "${repostatus} has ${count} errors when building our shaded downstream artifacts."
    add_footer_table shadedjars "@@BASE@@/${repostatus}-shadedjars.txt"
    return 1
  fi

  add_vote_table +1 shadedjars "${repostatus} has no errors when building our shaded downstream artifacts."
  return 0
}
###################################################

add_test_type hadoopcheck

## @description hadoopcheck file filter
## @audience private
## @stability evolving
## @param filename
function hadoopcheck_filefilter
{
  local filename=$1

  if [[ ${filename} =~ \.java$ ]] || [[ ${filename} =~ pom\.xml$ ]]; then
    add_test hadoopcheck
  fi
}

## @description Parse args to detect if QUICK_HADOOPCHECK mode is enabled.
## @audience private
## @stability evolving
function hadoopcheck_parse_args
{
  declare i

  for i in "$@"; do
    case ${i} in
      --quick-hadoopcheck)
        delete_parameter "${i}"
        QUICK_HADOOPCHECK=true
      ;;
    esac
  done
}
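
# e.g. `test-patch --quick-hadoopcheck ...` trims the version matrix used by
# hadoopcheck_rebuild below to a representative subset of each supported release line.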

## @description Adds QUICK_HADOOPCHECK env variable to DOCKER_EXTRAARGS.
## @audience private
## @stability evolving
function hadoopcheck_docker_support
{
  DOCKER_EXTRAARGS=("${DOCKER_EXTRAARGS[@]}" "--env=QUICK_HADOOPCHECK=${QUICK_HADOOPCHECK}")
}

## @description hadoopcheck test
## @audience private
## @stability evolving
## @param repostatus
function hadoopcheck_rebuild
{
  local repostatus=$1
  local hadoopver
  local logfile
  local count
  local result=0
  local hbase_hadoop2_versions
  local hbase_hadoop3_versions

  if [[ "${repostatus}" = branch ]]; then
    return 0
  fi

  if ! verify_needed_test hadoopcheck; then
    return 0
  fi

  big_console_header "Compiling against various Hadoop versions"

  start_clock

  # All supported Hadoop versions that we want to test the compilation with
  # See the Hadoop section on prereqs in the HBase Reference Guide
  if [[ "${PATCH_BRANCH}" = branch-1.3 ]]; then
    yetus_info "Setting Hadoop 2 versions to test based on branch-1.3 rules."
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop2_versions="2.4.1 2.5.2 2.6.5 2.7.7"
    else
      hbase_hadoop2_versions="2.4.0 2.4.1 2.5.0 2.5.1 2.5.2 2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7"
    fi
  elif [[ "${PATCH_BRANCH}" = branch-1.4 ]]; then
    yetus_info "Setting Hadoop 2 versions to test based on branch-1.4 rules."
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop2_versions="2.7.7"
    else
      hbase_hadoop2_versions="2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7"
    fi
  elif [[ "${PATCH_BRANCH}" = branch-1 ]]; then
    yetus_info "Setting Hadoop 2 versions to test based on branch-1 rules."
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop2_versions="2.10.0"
    else
      hbase_hadoop2_versions="2.10.0"
    fi
  elif [[ "${PATCH_BRANCH}" = branch-2.0 ]]; then
    yetus_info "Setting Hadoop 2 versions to test based on branch-2.0 rules."
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop2_versions="2.6.5 2.7.7 2.8.5"
    else
      hbase_hadoop2_versions="2.6.1 2.6.2 2.6.3 2.6.4 2.6.5 2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7 2.8.2 2.8.3 2.8.4 2.8.5"
    fi
  elif [[ "${PATCH_BRANCH}" = branch-2.1 ]]; then
    yetus_info "Setting Hadoop 2 versions to test based on branch-2.1 rules."
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop2_versions="2.7.7 2.8.5"
    else
      hbase_hadoop2_versions="2.7.1 2.7.2 2.7.3 2.7.4 2.7.5 2.7.6 2.7.7 2.8.2 2.8.3 2.8.4 2.8.5"
    fi
  elif [[ "${PATCH_BRANCH}" = branch-2.2 ]]; then
    yetus_info "Setting Hadoop 2 versions to test based on branch-2.2 rules."
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop2_versions="2.8.5 2.9.2 2.10.0"
    else
      hbase_hadoop2_versions="2.8.5 2.9.2 2.10.0"
    fi
  elif [[ "${PATCH_BRANCH}" = branch-2.* ]]; then
    yetus_info "Setting Hadoop 2 versions to test based on branch-2.3+ rules."
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop2_versions="2.10.0"
    else
      hbase_hadoop2_versions="2.10.0"
    fi
  else
    yetus_info "Setting Hadoop 2 versions to null on master/feature branch rules since we do not support hadoop 2 for hbase 3.x any more."
    hbase_hadoop2_versions=""
  fi

  if [[ "${PATCH_BRANCH}" = branch-1* ]]; then
    yetus_info "Setting Hadoop 3 versions to test based on branch-1.x rules."
    hbase_hadoop3_versions=""
  elif [[ "${PATCH_BRANCH}" = branch-2.0 ]] || [[ "${PATCH_BRANCH}" = branch-2.1 ]]; then
    yetus_info "Setting Hadoop 3 versions to test based on branch-2.0/branch-2.1 rules"
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop3_versions="3.0.3 3.1.2"
    else
      hbase_hadoop3_versions="3.0.3 3.1.1 3.1.2"
    fi
  else
    yetus_info "Setting Hadoop 3 versions to test based on branch-2.2+/master/feature branch rules"
    if [[ "${QUICK_HADOOPCHECK}" == "true" ]]; then
      hbase_hadoop3_versions="3.1.2 3.2.1"
    else
      hbase_hadoop3_versions="3.1.1 3.1.2 3.2.0 3.2.1"
    fi
  fi

  export MAVEN_OPTS="${MAVEN_OPTS}"

  for hadoopver in ${hbase_hadoop2_versions}; do
    logfile="${PATCH_DIR}/patch-javac-${hadoopver}.txt"
    # disabled because "maven_executor" needs to return both command and args
    # shellcheck disable=2046
    echo_and_redirect "${logfile}" \
      $(maven_executor) clean install \
        -DskipTests -DHBasePatchProcess \
        -Dhadoop-two.version="${hadoopver}"
    count=$(${GREP} -c '\[ERROR\]' "${logfile}")
    if [[ ${count} -gt 0 ]]; then
      add_vote_table -1 hadoopcheck "${BUILDMODEMSG} causes ${count} errors with Hadoop v${hadoopver}."
      add_footer_table hadoopcheck "@@BASE@@/patch-javac-${hadoopver}.txt"
      ((result=result+1))
    fi
  done

  hadoop_profile=""
  if [[ "${PATCH_BRANCH}" = branch-2* ]]; then
    hadoop_profile="-Dhadoop.profile=3.0"
  fi
  for hadoopver in ${hbase_hadoop3_versions}; do
    logfile="${PATCH_DIR}/patch-javac-${hadoopver}.txt"
    # disabled because "maven_executor" needs to return both command and args
    # shellcheck disable=2046
    echo_and_redirect "${logfile}" \
      $(maven_executor) clean install \
        -DskipTests -DHBasePatchProcess \
        -Dhadoop-three.version="${hadoopver}" \
        ${hadoop_profile}
    count=$(${GREP} -c '\[ERROR\]' "${logfile}")
    if [[ ${count} -gt 0 ]]; then
      add_vote_table -1 hadoopcheck "${BUILDMODEMSG} causes ${count} errors with Hadoop v${hadoopver}."
      add_footer_table hadoopcheck "@@BASE@@/patch-javac-${hadoopver}.txt"
      ((result=result+1))
    fi
  done

  if [[ ${result} -gt 0 ]]; then
    return 1
  fi

  if [[ -n "${hbase_hadoop3_versions}" ]]; then
    if [[ -n "${hbase_hadoop2_versions}" ]]; then
      add_vote_table +1 hadoopcheck "Patch does not cause any errors with Hadoop ${hbase_hadoop2_versions} or ${hbase_hadoop3_versions}."
    else
      add_vote_table +1 hadoopcheck "Patch does not cause any errors with Hadoop ${hbase_hadoop3_versions}."
    fi
  else
    add_vote_table +1 hadoopcheck "Patch does not cause any errors with Hadoop ${hbase_hadoop2_versions}."
  fi

  logfile="${PATCH_DIR}/patch-install-after-hadoopcheck.txt"
  echo_and_redirect "${logfile}" \
    $(maven_executor) clean install \
      -DskipTests -DHBasePatchProcess

  return 0
}
######################################

# TODO if we need the protoc check, we probably need to check building all the modules that rely on hbase-protocol
add_test_type hbaseprotoc

function hbaseprotoc_initialize
{
  # So long as there are inter-module dependencies on the protoc modules, we
  # need to run a full `mvn install` before a patch can be tested.
  yetus_debug "initializing HBase Protoc plugin."
  maven_add_install hbaseprotoc
}

## @description hbaseprotoc file filter
## @audience private
## @stability evolving
## @param filename
function hbaseprotoc_filefilter
{
  local filename=$1

  if [[ ${filename} =~ \.proto$ ]]; then
    add_test hbaseprotoc
  fi
}

## @description check hbase proto compilation
## @audience private
## @stability evolving
## @param repostatus
function hbaseprotoc_rebuild
{
  declare repostatus=$1
  declare i=0
  declare fn
  declare module
  declare logfile
  declare count
  declare result

  if [[ "${repostatus}" = branch ]]; then
    return 0
  fi

  if ! verify_needed_test hbaseprotoc; then
    return 0
  fi

  big_console_header "HBase protoc plugin: ${BUILDMODE}"

  start_clock

  personality_modules patch hbaseprotoc
  # Need to run 'install' instead of 'compile' because shading plugin
  # is hooked-up to 'install'; else hbase-protocol-shaded is left with
  # half of its process done.
  modules_workers patch hbaseprotoc install -DskipTests -X -DHBasePatchProcess

  # shellcheck disable=SC2153
  until [[ $i -eq "${#MODULE[@]}" ]]; do
    if [[ ${MODULE_STATUS[${i}]} == -1 ]]; then
      ((result=result+1))
      ((i=i+1))
      continue
    fi
    module=${MODULE[$i]}
    fn=$(module_file_fragment "${module}")
    logfile="${PATCH_DIR}/patch-hbaseprotoc-${fn}.txt"

    count=$(${GREP} -c '\[ERROR\]' "${logfile}")

    if [[ ${count} -gt 0 ]]; then
      module_status ${i} -1 "patch-hbaseprotoc-${fn}.txt" "Patch generated " \
        "${count} new protoc errors in ${module}."
      ((result=result+1))
    fi
    ((i=i+1))
  done

  modules_messages patch hbaseprotoc true
  if [[ ${result} -gt 0 ]]; then
    return 1
  fi
  return 0
}
######################################
add_test_type hbaseanti

## @description hbaseanti file filter
## @audience private
## @stability evolving
## @param filename
function hbaseanti_filefilter
{
  local filename=$1

  if [[ ${filename} =~ \.java$ ]]; then
    add_test hbaseanti
  fi
}

## @description hbaseanti patch file check
## @audience private
## @stability evolving
## @param filename
function hbaseanti_patchfile
{
  local patchfile=$1
  local warnings
  local result

  if [[ "${BUILDMODE}" = full ]]; then
    return 0
  fi

  if ! verify_needed_test hbaseanti; then
    return 0
  fi

  big_console_header "Checking for known anti-patterns"

  start_clock

  warnings=$(${GREP} -c 'new TreeMap<byte.*()' "${patchfile}")
  if [[ ${warnings} -gt 0 ]]; then
    add_vote_table -1 hbaseanti "" "The patch appears to have anti-pattern where BYTES_COMPARATOR was omitted."
    ((result=result+1))
  fi

  if [[ ${result} -gt 0 ]]; then
    return 1
  fi

  add_vote_table +1 hbaseanti "" "Patch does not have any anti-patterns."
  return 0
}
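
# An illustrative (hypothetical) line the grep above would flag:
#   Map<byte[], Foo> map = new TreeMap<byte[], Foo>();  // wanted: new TreeMap<>(Bytes.BYTES_COMPARATOR)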

## @description process the javac output for generating WARNING/ERROR
## @audience private
## @stability evolving
## @param input filename
## @param output filename
# Override the default javac_logfilter so that we can do a sort before outputting the WARNING/ERROR.
# This is because the output order of the error prone warnings is not stable, so the diff
# method will report unexpected errors if we do not sort it. Notice that a simple sort will cause
# line numbers to be sorted lexicographically, so the output may look a bit strange to a human,
# but it is really hard to sort by file name first and then line number and column number in shell...
function hbase_javac_logfilter
{
  declare input=$1
  declare output=$2

  ${GREP} -E '\[(ERROR|WARNING)\] /.*\.java:' "${input}" | sort > "${output}"
}
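
# Matched lines look something like the following (path and message illustrative):
#   [WARNING] /some/path/hbase-server/src/main/java/org/apache/hadoop/hbase/Foo.java:[42,8] message
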
## This is named so that yetus will check us right after running tests.
## Essentially, we check for normal failures and then we look for zombies.
#function hbase_unit_logfilter
#{
# declare testtype="unit"
# declare input=$1
# declare output=$2
# declare processes
# declare process_output
# declare zombies
# declare zombie_count=0
# declare zombie_process
#
# yetus_debug "in hbase-specific unit logfilter."
#
# # pass-through to whatever is counting actual failures
# if declare -f ${BUILDTOOL}_${testtype}_logfilter >/dev/null; then
# "${BUILDTOOL}_${testtype}_logfilter" "${input}" "${output}"
# elif declare -f ${testtype}_logfilter >/dev/null; then
# "${testtype}_logfilter" "${input}" "${output}"
# fi
#
# start_clock
# if [ -n "${BUILD_ID}" ]; then
# yetus_debug "Checking for zombie test processes."
# processes=$(jps -v | "${GREP}" surefirebooter | "${GREP}" -e "hbase.build.id=${BUILD_ID}")
# if [ -n "${processes}" ] && [ "$(echo "${processes}" | wc -l)" -gt 0 ]; then
# yetus_warn "Found some suspicious process(es). Waiting a bit to see if they're just slow to stop."
# yetus_debug "${processes}"
# sleep 30
# #shellcheck disable=SC2016
# for pid in $(echo "${processes}"| ${AWK} '{print $1}'); do
# # Test that our zombie is still running (and that it's still an hbase build item)
# process_output=$(ps -p "${pid}" | tail +2 | "${GREP}" -e "hbase.build.id=${BUILD_ID}")
# if [[ -n "${process_output}" ]]; then
# yetus_error "Zombie: ${process_output}"
# ((zombie_count = zombie_count + 1))
# zombie_process=$(jstack "${pid}" | "${GREP}" -e "\.Test" | "${GREP}" -e "\.java"| head -3)
# zombies="${zombies} ${zombie_process}"
# fi
# done
# fi
# if [ "${zombie_count}" -ne 0 ]; then
# add_vote_table -1 zombies "There are ${zombie_count} zombie test(s)"
# populate_test_table "zombie unit tests" "${zombies}"
# else
# yetus_info "Zombie check complete. All test runs exited normally."
# stop_clock
# fi
# else
# add_vote_table -0 zombies "There is no BUILD_ID env variable; can't check for zombies."
# fi
#
#}