HBASE-24049 use hadoop-2.10.0 for "packaging and integration" check
Signed-off-by: stack <stack@apache.org>
Signed-off-by: Jan Hentschel <jan.hentschel@ultratendency.com>
Signed-off-by: Viraj Jasani <vjasani@apache.org>

parent 10cbb3fb73
commit ea69b8711f
@@ -121,7 +121,7 @@ pipeline {
         }
         stage ('hadoop 2 cache') {
           environment {
-            HADOOP2_VERSION="2.8.5"
+            HADOOP2_VERSION="2.10.0"
           }
           steps {
             // directory must be unique for each parallel stage, because jenkins runs them in the same workspace :(
@@ -656,6 +656,7 @@ pipeline {
                     --hbase-client-install "hbase-client" \
                     "hbase-install" \
                     "hadoop-2/bin/hadoop" \
+                    hadoop-2/share/hadoop/yarn/timelineservice \
                     hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                     hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                     hadoop-2/bin/mapred \
@@ -675,6 +676,7 @@ pipeline {
                     --hbase-client-install hbase-client \
                     hbase-install \
                     hadoop-3/bin/hadoop \
+                    hadoop-3/share/hadoop/yarn/timelineservice \
                     hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                     hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                     hadoop-3/bin/mapred \
@@ -690,6 +692,7 @@ pipeline {
                     --hbase-client-install hbase-client \
                     hbase-install \
                     hadoop-3/bin/hadoop \
+                    hadoop-3/share/hadoop/yarn/timelineservice \
                     hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                     hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
                     hadoop-3/bin/mapred \
@@ -18,7 +18,7 @@
 
 set -e
 function usage {
-  echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/mapred/executable"
+  echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/share/hadoop/yarn/timelineservice /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/mapred/executable"
   echo ""
   echo "    --zookeeper-data /path/to/use    Where the embedded zookeeper instance should write its data."
   echo "                                     defaults to 'zk-data' in the working-dir."
@@ -67,9 +67,10 @@ if [ $# -lt 5 ]; then
 fi
 component_install="$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
 hadoop_exec="$(cd "$(dirname "$2")"; pwd)/$(basename "$2")"
-yarn_server_tests_test_jar="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
-mapred_jobclient_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
-mapred_exec="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"
+timeline_service_dir="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
+yarn_server_tests_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
+mapred_jobclient_test_jar="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"
+mapred_exec="$(cd "$(dirname "$6")"; pwd)/$(basename "$6")"
 
 if [ ! -x "${hadoop_exec}" ]; then
   echo "hadoop cli does not appear to be executable." >&2
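
Aside: the timelineservice directory is now the third positional argument, shifting the two test jars and the mapred executable to $4..$6, which is exactly the order the Jenkinsfile hunks above pass. A sketch of an invocation under the new contract (the script name here is hypothetical; the argument values mirror the pipeline):

# $1 component install, $2 hadoop cli, $3 timelineservice dir,
# $4 yarn server tests jar, $5 mapred jobclient tests jar, $6 mapred cli
./nightly-pseudo-distributed-test.sh \
  --hbase-client-install hbase-client \
  hbase-install \
  hadoop-2/bin/hadoop \
  hadoop-2/share/hadoop/yarn/timelineservice \
  hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
  hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
  hadoop-2/bin/mapred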

@@ -285,18 +286,25 @@ echo "Starting up Hadoop"
 if [ "${hadoop_version%.*.*}" -gt 2 ]; then
   "${mapred_exec}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
 else
-  HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+  HADOOP_CLASSPATH="${timeline_service_dir}/*:${timeline_service_dir}/lib/*:${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
 fi
 
 echo "$!" > "${working_dir}/hadoop.pid"
 
+# 2 + 4 + 8 + .. + 256 ~= 8.5 minutes.
+max_sleep_time=512
 sleep_time=2
-until [ -s "${working_dir}/hbase-conf/core-site.xml" ]; do
+until [[ -s "${working_dir}/hbase-conf/core-site.xml" || "${sleep_time}" -ge "${max_sleep_time}" ]]; do
   printf '\twaiting for Hadoop to finish starting up.\n'
   sleep "${sleep_time}"
   sleep_time="$((sleep_time*2))"
 done
 
+if [ "${sleep_time}" -ge "${max_sleep_time}" ] ; then
+  echo "time out waiting for Hadoop to startup" >&2
+  exit 1
+fi
+
 if [ "${hadoop_version%.*.*}" -gt 2 ]; then
   echo "Verifying configs"
   "${hadoop_exec}" --config "${working_dir}/hbase-conf/" conftest
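
Two asides on this hunk. The HADOOP_CLASSPATH now pulls in the timelineservice jars and their lib/ dependencies, presumably because the Hadoop 2.10 minicluster needs the YARN timeline service classes on its classpath. And the new wait loop is a capped exponential backoff: it sleeps 2, 4, ..., 256 seconds, so the total wait before sleep_time hits the 512-second cap is the geometric sum 2 + 4 + ... + 256 = 2 * (2^8 - 1) = 510 seconds, the "~= 8.5 minutes" the comment cites. A minimal standalone sketch of the same pattern, assuming a hypothetical ready.marker readiness file:

#!/usr/bin/env bash
# Wait for ready.marker with exponential backoff; give up after
# sleeping 2 + 4 + ... + 256 = 510 seconds (~8.5 minutes) total.
max_sleep_time=512
sleep_time=2
until [[ -s ready.marker || "${sleep_time}" -ge "${max_sleep_time}" ]]; do
  printf 'waiting for ready.marker\n'
  sleep "${sleep_time}"
  sleep_time="$((sleep_time*2))"
done
if [ "${sleep_time}" -ge "${max_sleep_time}" ]; then
  echo "timed out waiting for ready.marker" >&2
  exit 1
fi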

@@ -182,7 +182,7 @@ if mvn -DskipTests -Prelease --batch-mode -Dmaven.repo.local="${m2_tarbuild}" cl
     fi
   done
 fi
-echo "Building a binary tarball from the source tarball failed. see srctarball_install.log for details."
+echo "Building a binary tarball from the source tarball failed. see ${working_dir}/srctarball_install.log for details."
 # Copy up the rat.txt to the working dir so available in build archive in case rat complaints.
 # rat.txt can be under any module target dir... copy them all up renaming them to include parent dir as we go.
 find ${unpack_dir} -name rat.txt -type f | while IFS= read -r NAME; do cp -v "$NAME" "${working_dir}/${NAME//\//_}"; done
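
Aside on the last context line: bash's ${NAME//\//_} expansion replaces every slash with an underscore, so each module's rat.txt lands in the working dir under a unique, path-derived name. A quick illustration with a hypothetical path:

NAME="hbase-assembly/target/rat.txt"  # hypothetical module path
echo "${NAME//\//_}"                  # prints hbase-assembly_target_rat.txt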