diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index babd90005ab..89026f29e17 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -572,6 +572,7 @@ pipeline {
                       "hadoop-2/bin/hadoop" \
                       hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                       hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                      hadoop-2/share/hadoop/yarn/timelineservice/hadoop-yarn-server-timelineservice-*.jar \
                       >output-integration/hadoop-2.log 2>&1 ; then
                     echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
                     exit 2
@@ -590,6 +591,7 @@ pipeline {
                       hadoop-3/bin/hadoop \
                       hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                       hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                      hadoop-3/share/hadoop/yarn/timelineservice/hadoop-yarn-server-timelineservice-*.jar \
                       >output-integration/hadoop-3.log 2>&1 ; then
                     echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
                     exit 2
@@ -604,6 +606,7 @@ pipeline {
                       hadoop-3/bin/hadoop \
                       hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                       hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                      hadoop-3/share/hadoop/yarn/timelineservice/hadoop-yarn-server-timelineservice-*.jar \
                       >output-integration/hadoop-3-shaded.log 2>&1 ; then
                     echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
                     exit 2
diff --git a/dev-support/hbase_nightly_pseudo-distributed-test.sh b/dev-support/hbase_nightly_pseudo-distributed-test.sh
index cc2dd5ec4e6..60600b82c1c 100755
--- a/dev-support/hbase_nightly_pseudo-distributed-test.sh
+++ b/dev-support/hbase_nightly_pseudo-distributed-test.sh
@@ -18,7 +18,7 @@
 set -e

 function usage {
-  echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar"
+  echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/hadoop/hadoop-yarn-server-timelineservice.jar"
   echo ""
   echo "    --zookeeper-data /path/to/use          Where the embedded zookeeper instance should write its data."
   echo "                                           defaults to 'zk-data' in the working-dir."
@@ -33,7 +33,7 @@ function usage {
   exit 1
 }
 # if no args specified, show usage
-if [ $# -lt 4 ]; then
+if [ $# -lt 5 ]; then
   usage
 fi

@@ -62,13 +62,14 @@ do
 done

 # should still have where component checkout is.
-if [ $# -lt 4 ]; then
+if [ $# -lt 5 ]; then
   usage
 fi
 component_install="$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
 hadoop_exec="$(cd "$(dirname "$2")"; pwd)/$(basename "$2")"
 yarn_server_tests_test_jar="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
 mapred_jobclient_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
+yarn_server_timelineservice_jar="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"

 if [ ! -x "${hadoop_exec}" ]; then
   echo "hadoop cli does not appear to be executable." >&2
@@ -276,7 +277,16 @@
 trap cleanup EXIT SIGQUIT

 echo "Starting up Hadoop"
-HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+if [ "${hadoop_version%.*.*}" -gt 2 ]; then
+  if [ ! -f "${yarn_server_timelineservice_jar}" ]; then
+    echo "Specified YARN server timeline service jar is not a file." >&2
+    exit 1
+  fi
+  HADOOP_CLASSPATH="${yarn_server_timelineservice_jar}":"${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+else
+  HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+fi
+
 echo "$!" > "${working_dir}/hadoop.pid"

 sleep_time=2
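For context, a minimal sketch (not part of the patch) of invoking the updated script with the new fifth positional argument. The jar paths mirror the ones the Jenkinsfile passes for Hadoop 3; the hbase-install path and the ./dev-support prefix are illustrative:

  ./dev-support/hbase_nightly_pseudo-distributed-test.sh \
    hbase-install \
    hadoop-3/bin/hadoop \
    hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
    hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
    hadoop-3/share/hadoop/yarn/timelineservice/hadoop-yarn-server-timelineservice-*.jar

With this change, on Hadoop 3 (major version greater than 2) the script prepends the timeline service jar to HADOOP_CLASSPATH before starting the minicluster, while on Hadoop 2 the jar is not added to the classpath even though the fifth argument is still required by the argument-count check.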