HBASE-22490 (addendum) Nightly client integration test fails with hadoop-3
parent 0fcd7cae55
commit 12584ebf46
@@ -572,6 +572,7 @@ pipeline {
 "hadoop-2/bin/hadoop" \
 hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
 hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+hadoop-2/bin/mapred \
 >output-integration/hadoop-2.log 2>&1 ; then
 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
 exit 2
@@ -590,6 +591,7 @@ pipeline {
 hadoop-3/bin/hadoop \
 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+hadoop-3/bin/mapred \
 >output-integration/hadoop-3.log 2>&1 ; then
 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
 exit 2
@@ -604,6 +606,7 @@ pipeline {
 hadoop-3/bin/hadoop \
 hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
 hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+hadoop-3/bin/mapred \
 >output-integration/hadoop-3-shaded.log 2>&1 ; then
 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
 exit 2
@@ -18,7 +18,7 @@

 set -e
 function usage {
-echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar"
+echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/mapred/executable"
 echo ""
 echo " --zookeeper-data /path/to/use Where the embedded zookeeper instance should write its data."
 echo " defaults to 'zk-data' in the working-dir."
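Illustration only (not part of the commit): a sketch of an invocation with the new fifth positional argument. The script name and the bin-install path below are placeholders; the Hadoop paths mirror the Hadoop 3 stanza in the pipeline hunks above.

# hypothetical invocation; script name and /path/to/hbase-bin-install are placeholders
./hbase_nightly_pseudo-distributed-test.sh \
  /path/to/hbase-bin-install \
  hadoop-3/bin/hadoop \
  hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
  hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
  hadoop-3/bin/mapred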
@@ -33,7 +33,7 @@ function usage {
 exit 1
 }
 # if no args specified, show usage
-if [ $# -lt 4 ]; then
+if [ $# -lt 5 ]; then
 usage
 fi

@@ -62,19 +62,25 @@ do
 done

 # should still have where component checkout is.
-if [ $# -lt 4 ]; then
+if [ $# -lt 5 ]; then
 usage
 fi
 component_install="$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
 hadoop_exec="$(cd "$(dirname "$2")"; pwd)/$(basename "$2")"
 yarn_server_tests_test_jar="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
 mapred_jobclient_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
+mapred_exec="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"

 if [ ! -x "${hadoop_exec}" ]; then
 echo "hadoop cli does not appear to be executable." >&2
 exit 1
 fi

+if [ ! -x "${mapred_exec}" ]; then
+echo "mapred cli does not appear to be executable." >&2
+exit 1
+fi
+
 if [ ! -d "${component_install}" ]; then
 echo "Path to HBase binary install should be a directory." >&2
 exit 1
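Illustration only (not part of the commit): the "$(cd "$(dirname ...)"; pwd)/$(basename ...)" idiom above turns a possibly relative argument into an absolute path before the executability check. A minimal sketch, assuming the relative path exists on disk:

# hypothetical standalone demo of how mapred_exec gets resolved and checked
arg="hadoop-3/bin/mapred"                                         # relative path, as the pipeline passes it
mapred_exec="$(cd "$(dirname "$arg")"; pwd)/$(basename "$arg")"   # absolute path to the same file
if [ ! -x "${mapred_exec}" ]; then
  echo "mapred cli does not appear to be executable." >&2
  exit 1
fi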
@@ -276,7 +282,12 @@ trap cleanup EXIT SIGQUIT

 echo "Starting up Hadoop"

-HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+if [ "${hadoop_version%.*.*}" -gt 2 ]; then
+"${mapred_exec}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+else
+HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+fi

 echo "$!" > "${working_dir}/hadoop.pid"

 sleep_time=2
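Illustration only (not part of the commit): how the ${hadoop_version%.*.*} expansion in the new branch behaves. The '%.*.*' pattern strips the shortest trailing '.y.z' suffix, leaving the major version, so Hadoop 3 takes the mapred minicluster path and Hadoop 2 keeps the old 'hadoop jar ... minicluster' invocation. A minimal sketch with assumed version strings:

# hypothetical demo of the major-version check; the hadoop_version values are examples
for hadoop_version in 2.8.5 3.1.2; do
  major="${hadoop_version%.*.*}"   # '%.*.*' removes the shortest '.y.z' suffix: 2.8.5 -> 2, 3.1.2 -> 3
  if [ "${major}" -gt 2 ]; then
    echo "${hadoop_version}: would start the minicluster via mapred"
  else
    echo "${hadoop_version}: would start it via 'hadoop jar ...jobclient-tests.jar minicluster'"
  fi
done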