HBASE-27359 Publish binaries and maven artifacts for both hadoop2 and hadoop3 (#4856)

Signed-off-by: Xiaolin Ha <haxiaolin@apache.org>
This commit is contained in:
Duo Zhang 2022-12-05 12:04:07 +08:00 committed by GitHub
parent 31bb688c20
commit febe511750
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 148 additions and 52 deletions

View File

@ -21,7 +21,6 @@
# * Java 8 # * Java 8
FROM ubuntu:22.04 FROM ubuntu:22.04
# Install extra needed repos and refresh. # Install extra needed repos and refresh.
# #
# This is all in a single "RUN" command so that if anything changes, "apt update" is run to fetch # This is all in a single "RUN" command so that if anything changes, "apt update" is run to fetch
@ -39,6 +38,7 @@ RUN DEBIAN_FRONTEND=noninteractive apt-get -qq -y update \
python3-pip='22.0.2+dfsg-*' \ python3-pip='22.0.2+dfsg-*' \
subversion='1.14.1-*' \ subversion='1.14.1-*' \
wget='1.21.2-*' \ wget='1.21.2-*' \
patch='2.7.6-*' \
&& apt-get clean \ && apt-get clean \
&& rm -rf /var/lib/apt/lists/* \ && rm -rf /var/lib/apt/lists/* \
&& update-alternatives --set java /usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java \ && update-alternatives --set java /usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java \

View File

@ -31,6 +31,7 @@ with open(sys.argv[1], 'r', errors = 'ignore') as new_r, open(sys.argv[2], 'r',
if pattern.match(line): if pattern.match(line):
break break
line = prev_r.readline() line = prev_r.readline()
w.writelines('# RELEASENOTES')
for newline in new_r: for newline in new_r:
w.writelines(newline) w.writelines(newline)
while line: while line:

View File

@ -130,11 +130,13 @@ if [[ "$1" == "tag" ]]; then
# So, here we prepend the project name to the version, but only for the hbase sub-projects. # So, here we prepend the project name to the version, but only for the hbase sub-projects.
jira_fix_version="${RELEASE_VERSION}" jira_fix_version="${RELEASE_VERSION}"
shopt -s nocasematch shopt -s nocasematch
if [[ "${PROJECT}" =~ ^hbase- ]]; then if [[ "${PROJECT}" == "hbase-thirdparty" ]]; then
jira_fix_version="thirdparty-${RELEASE_VERSION}"
elif [[ "${PROJECT}" =~ ^hbase- ]]; then
jira_fix_version="${PROJECT}-${RELEASE_VERSION}" jira_fix_version="${PROJECT}-${RELEASE_VERSION}"
fi fi
shopt -u nocasematch shopt -u nocasematch
update_releasenotes "$(pwd)/${PROJECT}" "${jira_fix_version}" "${PREV_VERSION}" update_releasenotes "$(pwd)/${PROJECT}" "${jira_fix_version}"
cd "${PROJECT}" cd "${PROJECT}"
@ -302,15 +304,31 @@ if [[ "$1" == "publish-release" ]]; then
mvn_log="${BASE_DIR}/mvn_deploy_release.log" mvn_log="${BASE_DIR}/mvn_deploy_release.log"
log "Staging release in nexus" log "Staging release in nexus"
maven_deploy release "$mvn_log" maven_deploy release "$mvn_log"
declare staged_repo_id="dryrun-no-repo" declare staged_repo_id
declare hadoop3_staged_repo_id
if ! is_dry_run; then if ! is_dry_run; then
staged_repo_id=$(grep -o "Closing staging repository with ID .*" "$mvn_log" \ mapfile -t staged_repo_ids < <(grep -o "Closing staging repository with ID .*" "$mvn_log" \
| sed -e 's/Closing staging repository with ID "\([^"]*\)"./\1/') | sed -e 's/Closing staging repository with ID "\([^"]*\)"./\1/')
log "Release artifacts successfully published to repo ${staged_repo_id}" log "Release artifacts successfully published to repo: " "${staged_repo_ids[@]}"
repo_count="${#staged_repo_ids[@]}"
if [[ "${repo_count}" == "2" ]]; then
staged_repo_id=${staged_repo_ids[0]}
hadoop3_staged_repo_id=${staged_repo_ids[1]}
elif [[ "${repo_count}" == "1" ]]; then
staged_repo_id=${staged_repo_ids[0]}
hadoop3_staged_repo_id="not-applicable"
else
staged_repo_id="not-applicable"
hadoop3_staged_repo_id="not-applicable"
fi
rm "$mvn_log" rm "$mvn_log"
else else
log "Dry run: Release artifacts successfully built, but not published due to dry run." log "Dry run: Release artifacts successfully built, but not published due to dry run."
staged_repo_id="dryrun-no-repo"
hadoop3_staged_repo_id="dryrun-no-repo"
fi fi
export staged_repo_id
export hadoop3_staged_repo_id
# Dump out email to send. Where we find vote.tmpl depends # Dump out email to send. Where we find vote.tmpl depends
# on where this script is run from # on where this script is run from
PROJECT_TEXT="${PROJECT//-/ }" #substitute like 's/-/ /g' PROJECT_TEXT="${PROJECT//-/ }" #substitute like 's/-/ /g'

View File

@ -75,7 +75,7 @@ function banner {
} }
function log { function log {
echo "$(date -u +"%Y-%m-%dT%H:%M:%SZ") ${1}" echo "$(date -u +"%Y-%m-%dT%H:%M:%SZ") $*"
} }
# current number of seconds since epoch # current number of seconds since epoch
@ -298,13 +298,12 @@ EOF
fi fi
GPG_ARGS=("${GPG_ARGS[@]}" --local-user "${GPG_KEY}") GPG_ARGS=("${GPG_ARGS[@]}" --local-user "${GPG_KEY}")
if ! is_dry_run; then # The nexus staging plugin needs the password to contact to remote server even if
# skipRemoteStaging is set to true, not sure why so here we need the password even
# if this is a dry run
if [ -z "$ASF_PASSWORD" ]; then if [ -z "$ASF_PASSWORD" ]; then
stty -echo && printf "ASF_PASSWORD: " && read -r ASF_PASSWORD && printf '\n' && stty echo stty -echo && printf "ASF_PASSWORD: " && read -r ASF_PASSWORD && printf '\n' && stty echo
fi fi
else
ASF_PASSWORD="***INVALID***"
fi
export ASF_PASSWORD export ASF_PASSWORD
} }
@ -561,7 +560,6 @@ function get_jira_name {
function update_releasenotes { function update_releasenotes {
local project_dir="$1" local project_dir="$1"
local jira_fix_version="$2" local jira_fix_version="$2"
local previous_jira_fix_version="$3"
local jira_project local jira_project
local timing_token local timing_token
timing_token="$(start_step)" timing_token="$(start_step)"
@ -583,7 +581,7 @@ function update_releasenotes {
"${project_dir}/CHANGES.md" || true "${project_dir}/CHANGES.md" || true
else else
# should be hbase 3.x, will copy CHANGES.md from archive.a.o/dist # should be hbase 3.x, will copy CHANGES.md from archive.a.o/dist
curl --location --fail --silent --show-error --output ${project_dir}/CHANGES.md "https://archive.apache.org/dist/hbase/${previous_jira_fix_version}/CHANGES.md" curl --location --fail --silent --show-error --output ${project_dir}/CHANGES.md "https://archive.apache.org/dist/hbase/${PREV_VERSION}/CHANGES.md"
fi fi
if [ -f "${project_dir}/RELEASENOTES.md" ]; then if [ -f "${project_dir}/RELEASENOTES.md" ]; then
sed -i -e \ sed -i -e \
@ -591,7 +589,7 @@ function update_releasenotes {
"${project_dir}/RELEASENOTES.md" || true "${project_dir}/RELEASENOTES.md" || true
else else
# should be hbase 3.x, will copy CHANGES.md from archive.a.o/dist # should be hbase 3.x, will copy CHANGES.md from archive.a.o/dist
curl --location --fail --silent --show-error --output ${project_dir}/RELEASENOTES.md "https://archive.apache.org/dist/hbase/${previous_jira_fix_version}/RELEASENOTES.md" curl --location --fail --silent --show-error --output ${project_dir}/RELEASENOTES.md "https://archive.apache.org/dist/hbase/${PREV_VERSION}/RELEASENOTES.md"
fi fi
# Yetus will not generate CHANGES if no JIRAs fixed against the release version # Yetus will not generate CHANGES if no JIRAs fixed against the release version
@ -645,6 +643,48 @@ make_src_release() {
stop_step "${timing_token}" stop_step "${timing_token}"
} }
# Build, sign, and checksum the binary tarball for one project/version pair.
# Globals (read): MVN, PUBLISH_PROFILES, GPG, GPG_ARGS, PROJECT; also calls
#   kick_gpg_agent and log, defined elsewhere in this script.
# Side effects: cds into "$project" and back out; runs "git clean -d -f -x"
#   inside the checkout; copies *-bin.tar.gz plus .asc/.sha512 files into the
#   parent directory.
# Arguments:
#   $1 - project directory name (e.g. hbase)
#   $2 - version string; a *-hadoop3 / *-hadoop3-SNAPSHOT suffix selects the
#        hadoop-3.0 build profile via -Drevision/-Dhadoop.profile
build_release_binary() {
  local project="${1}"
  local version="${2}"
  local base_name="${project}-${version}"
  local extra_flags=()
  # A hadoop3-flavored version means we must override the ${revision}
  # placeholder in the pom and activate the hadoop-3.0 profile.
  if [[ "${version}" = *-hadoop3 ]] || [[ "${version}" = *-hadoop3-SNAPSHOT ]]; then
    extra_flags=("-Drevision=${version}" "-Dhadoop.profile=3.0")
  fi
  cd "$project" || exit
  git clean -d -f -x
  # Three invocations of maven. This seems to work. One to
  # populate the repo, another to build the site, and then
  # a third to assemble the binary artifact. Trying to do
  # all in the one invocation fails; a problem in our
  # assembly spec or in maven. TODO. Meantime, three invocations.
  cmd=("${MVN[@]}" "${extra_flags[@]}" clean install -DskipTests)
  echo "${cmd[*]}"
  "${cmd[@]}"
  cmd=("${MVN[@]}" "${extra_flags[@]}" site -DskipTests)
  echo "${cmd[*]}"
  "${cmd[@]}"
  # Signing below can hang waiting on the gpg agent; poke it first.
  kick_gpg_agent
  cmd=("${MVN[@]}" "${extra_flags[@]}" install assembly:single -DskipTests -Dcheckstyle.skip=true "${PUBLISH_PROFILES[@]}")
  echo "${cmd[*]}"
  "${cmd[@]}"
  # Check there is a bin gz output. The build may not produce one: e.g. hbase-thirdparty.
  # NOTE(review): the prefix uses global ${PROJECT} while base_name uses the
  # $project argument — presumably always equal for non-hadoop3 paths; confirm.
  local f_bin_prefix="./${PROJECT}-assembly/target/${base_name}"
  if ls "${f_bin_prefix}"*-bin.tar.gz &>/dev/null; then
    cp "${f_bin_prefix}"*-bin.tar.gz ..
    cd .. || exit
    # Detach-sign each tarball and write a SHA512 checksum next to it.
    for i in "${base_name}"*-bin.tar.gz; do
      "${GPG}" "${GPG_ARGS[@]}" --armour --output "${i}.asc" --detach-sig "${i}"
      "${GPG}" "${GPG_ARGS[@]}" --print-md SHA512 "${i}" > "${i}.sha512"
    done
  else
    cd .. || exit
    log "No ${f_bin_prefix}*-bin.tar.gz product; expected?"
  fi
}
# Make binary release. # Make binary release.
# Takes as arguments first the project name -- e.g. hbase or hbase-operator-tools # Takes as arguments first the project name -- e.g. hbase or hbase-operator-tools
# -- and then the version string. Expects to find checkout adjacent to this script # -- and then the version string. Expects to find checkout adjacent to this script
@ -662,37 +702,10 @@ make_binary_release() {
local timing_token local timing_token
timing_token="$(start_step)" timing_token="$(start_step)"
rm -rf "${base_name}"-bin* rm -rf "${base_name}"-bin*
cd "$project" || exit
git clean -d -f -x build_release_binary "${project}" "${version}"
# Three invocations of maven. This seems to work. One to if should_build_with_hadoop3 "$project/pom.xml"; then
# populate the repo, another to build the site, and then build_release_binary "${project}" "$(get_hadoop3_version "${version}")"
# a third to assemble the binary artifact. Trying to do
# all in the one invocation fails; a problem in our
# assembly spec to in maven. TODO. Meantime, three invocations.
cmd=("${MVN[@]}" clean install -DskipTests)
echo "${cmd[*]}"
"${cmd[@]}"
cmd=("${MVN[@]}" site -DskipTests)
echo "${cmd[*]}"
"${cmd[@]}"
kick_gpg_agent
cmd=("${MVN[@]}" install assembly:single -DskipTests -Dcheckstyle.skip=true "${PUBLISH_PROFILES[@]}")
echo "${cmd[*]}"
"${cmd[@]}"
# Check there is a bin gz output. The build may not produce one: e.g. hbase-thirdparty.
local f_bin_prefix="./${PROJECT}-assembly/target/${base_name}"
if ls "${f_bin_prefix}"*-bin.tar.gz &>/dev/null; then
cp "${f_bin_prefix}"*-bin.tar.gz ..
cd .. || exit
for i in "${base_name}"*-bin.tar.gz; do
"${GPG}" "${GPG_ARGS[@]}" --armour --output "${i}.asc" --detach-sig "${i}"
"${GPG}" "${GPG_ARGS[@]}" --print-md SHA512 "${i}" > "${i}.sha512"
done
else
cd .. || exit
log "No ${f_bin_prefix}*-bin.tar.gz product; expected?"
fi fi
stop_step "${timing_token}" stop_step "${timing_token}"
@ -746,6 +759,7 @@ function maven_deploy { #inputs: <snapshot|release> <log_file_path>
# Invoke with cwd=$PROJECT # Invoke with cwd=$PROJECT
local deploy_type="$1" local deploy_type="$1"
local mvn_log_file="$2" #secondary log file used later to extract staged_repo_id local mvn_log_file="$2" #secondary log file used later to extract staged_repo_id
local staging_dir
if [[ "$deploy_type" != "snapshot" && "$deploy_type" != "release" ]]; then if [[ "$deploy_type" != "snapshot" && "$deploy_type" != "release" ]]; then
error "unrecognized deploy type, must be 'snapshot'|'release'" error "unrecognized deploy type, must be 'snapshot'|'release'"
fi fi
@ -759,6 +773,8 @@ function maven_deploy { #inputs: <snapshot|release> <log_file_path>
elif [[ "$deploy_type" == "release" ]] && [[ "$RELEASE_VERSION" =~ SNAPSHOT ]]; then elif [[ "$deploy_type" == "release" ]] && [[ "$RELEASE_VERSION" =~ SNAPSHOT ]]; then
error "Non-snapshot release version must not include the word 'SNAPSHOT'; you gave version '$RELEASE_VERSION'" error "Non-snapshot release version must not include the word 'SNAPSHOT'; you gave version '$RELEASE_VERSION'"
fi fi
# Just output to parent directory, the staging directory has a staging prefix already
staging_dir="$(dirname "$(pwd)")/local-staged"
# Publish ${PROJECT} to Maven repo # Publish ${PROJECT} to Maven repo
# shellcheck disable=SC2154 # shellcheck disable=SC2154
log "Publishing ${PROJECT} checkout at '$GIT_REF' ($git_hash)" log "Publishing ${PROJECT} checkout at '$GIT_REF' ($git_hash)"
@ -767,20 +783,44 @@ function maven_deploy { #inputs: <snapshot|release> <log_file_path>
maven_set_version "$RELEASE_VERSION" maven_set_version "$RELEASE_VERSION"
# Prepare for signing # Prepare for signing
kick_gpg_agent kick_gpg_agent
declare -a mvn_goals=(clean) declare -a mvn_extra_flags=()
if ! is_dry_run; then if is_dry_run; then
mvn_goals=("${mvn_goals[@]}" deploy) # In dry run mode, skip deploying to remote repo
mvn_extra_flags=("${mvn_extra_flags[@]}" -DskipRemoteStaging)
fi fi
log "${MVN[@]}" -DskipTests -Dcheckstyle.skip=true "${PUBLISH_PROFILES[@]}" "${mvn_goals[@]}" log "${MVN[@]}" clean deploy -DskipTests -Dcheckstyle.skip=true \
-DaltStagingDirectory="${staging_dir}" "${PUBLISH_PROFILES[@]}" "${mvn_extra_flags[@]}"
log "Logging to ${mvn_log_file}. This will take a while..." log "Logging to ${mvn_log_file}. This will take a while..."
rm -f "$mvn_log_file" rm -f "$mvn_log_file"
# The tortuous redirect in the next command allows mvn's stdout and stderr to go to mvn_log_file, # The tortuous redirect in the next command allows mvn's stdout and stderr to go to mvn_log_file,
# while also sending stderr back to the caller. # while also sending stderr back to the caller.
# shellcheck disable=SC2094 # shellcheck disable=SC2094
if ! "${MVN[@]}" -DskipTests -Dcheckstyle.skip=true "${PUBLISH_PROFILES[@]}" \ if ! "${MVN[@]}" clean deploy -DskipTests -Dcheckstyle.skip=true "${PUBLISH_PROFILES[@]}" \
"${mvn_goals[@]}" 1>> "$mvn_log_file" 2> >( tee -a "$mvn_log_file" >&2 ); then -DaltStagingDirectory="${staging_dir}" "${PUBLISH_PROFILES[@]}" "${mvn_extra_flags[@]}" \
1>> "$mvn_log_file" 2> >( tee -a "$mvn_log_file" >&2 ); then
error "Deploy build failed, for details see log at '$mvn_log_file'." error "Deploy build failed, for details see log at '$mvn_log_file'."
fi fi
local hadoop3_version
if should_build_with_hadoop3 pom.xml; then
hadoop3_version="$(get_hadoop3_version "${RELEASE_VERSION}")"
hadoop3_staging_dir="${staging_dir}-hadoop3"
log "Deploying artifacts for hadoop3..."
log "${MVN[@]}" clean deploy -DskipTests -Dcheckstyle.skip=true \
-Drevision="${hadoop3_version}" -Dhadoop.profile=3.0 \
-DaltStagingDirectory="${hadoop3_staging_dir}" "${PUBLISH_PROFILES[@]}" "${mvn_extra_flags[@]}"
{
echo "========================================================================"
echo "Deploy build for hadoop3"
echo "========================================================================"
} >> "$mvn_log_file"
# shellcheck disable=SC2094
if ! "${MVN[@]}" clean deploy -DskipTests -Dcheckstyle.skip=true "${PUBLISH_PROFILES[@]}" \
-Drevision="${hadoop3_version}" -Dhadoop.profile=3.0 \
-DaltStagingDirectory="${hadoop3_staging_dir}" "${PUBLISH_PROFILES[@]}" "${mvn_extra_flags[@]}" \
1>> "$mvn_log_file" 2> >( tee -a "$mvn_log_file" >&2 ); then
error "Deploy build failed, for details see log at '$mvn_log_file'."
fi
fi
log "BUILD SUCCESS." log "BUILD SUCCESS."
stop_step "${timing_token}" stop_step "${timing_token}"
return 0 return 0
@ -798,3 +838,36 @@ function is_tracked() {
git ls-files --error-unmatch "$file" &>/dev/null git ls-files --error-unmatch "$file" &>/dev/null
return $? return $?
} }
# When we have all the below conditions matched, we will build hadoop3 binaries
# 1. Use $revision place holder as version in pom
# 2. Has a hadoop-2.0 profile
# 3. Has a hadoop-3.0 profile
# Decide whether hadoop3 artifacts should be built for the given pom.
# We build hadoop3 binaries only when ALL of the below conditions hold:
# 1. The pom uses the ${revision} placeholder as its version
# 2. The pom declares a hadoop-2.0 profile
# 3. The pom declares a hadoop-3.0 profile
# Arguments:
#   $1 - path to the pom.xml to inspect
# Returns: 0 when a hadoop3 build applies, 1 otherwise
function should_build_with_hadoop3() {
  local pom="$1"
  local maven_version
  local profile
  maven_version="$(parse_version < "${pom}")"
  # We do not want to expand ${revision} here, see https://maven.apache.org/maven-ci-friendly.html
  # If we use ${revision} as placeholder, the way to bump maven version will be different
  # shellcheck disable=SC2016
  if [[ "${maven_version}" != '${revision}' ]]; then
    return 1
  fi
  # Require both hadoop profiles. -x/-F make grep match the whole profile id
  # as a literal string, so the '.' in the name is not a regex wildcard.
  for profile in hadoop-2.0 hadoop-3.0; do
    if ! xmllint --xpath "//*[local-name()='project']/*[local-name()='profiles']/*[local-name()='profile']/*[local-name()='id']/text()" "${pom}" \
        | grep -qxF "${profile}"; then
      return 1
    fi
  done
  return 0
}
# Derive the hadoop3 flavor of a version string: insert "-hadoop3" ahead of a
# "-SNAPSHOT" suffix when present, otherwise append "-hadoop3" at the end.
# Arguments:
#   $1 - base version string (e.g. 2.5.2 or 2.5.2-SNAPSHOT)
# Outputs: the hadoop3 version on stdout
function get_hadoop3_version() {
  local base_version="$1"
  case "${base_version}" in
    *-SNAPSHOT)
      # Substitute the snapshot marker so "-hadoop3" lands before it.
      printf '%s\n' "${base_version/-SNAPSHOT/-hadoop3-SNAPSHOT}"
      ;;
    *)
      printf '%s\n' "${base_version}-hadoop3"
      ;;
  esac
}

View File

@ -23,6 +23,10 @@ Maven artifacts are available in a staging repository at:
https://repository.apache.org/content/repositories/${staged_repo_id}/ https://repository.apache.org/content/repositories/${staged_repo_id}/
Maven artifacts for hadoop3 are available in a staging repository at:
https://repository.apache.org/content/repositories/${hadoop3_staged_repo_id}/
Artifacts were signed with the ${GPG_KEY} key which can be found in: Artifacts were signed with the ${GPG_KEY} key which can be found in:
https://downloads.apache.org/hbase/KEYS https://downloads.apache.org/hbase/KEYS