From 31528bcdaf6a07480578d779ad207b91698637bd Mon Sep 17 00:00:00 2001
From: Maytas Monsereenusorn <52679095+maytasm3@users.noreply.github.com>
Date: Wed, 12 Feb 2020 16:36:31 -0800
Subject: [PATCH] Integration tests for JDK 11 (#9249)

* Integration tests for JDK 11

* fix vm option

* fix supervisord

* fix pom

* add integration tests for java 11

* add logs

* update docs

* Update dockerfile to acknowledge AdoptOpenJDK for Java 11 install commands
---
 .travis.yml                                   | 101 +++++++++++++---
 integration-tests/common_run_cluster.sh       | 108 ++++++++++++++++++
 integration-tests/docker-base/README.md       |  31 +++++
 .../docker-base/jdk11/Dockerfile              |  62 ++++++++++
 .../docker-base/{ => jdk8}/Dockerfile         |   6 +-
 integration-tests/docker-base/setup.sh        |   3 -
 integration-tests/docker/Dockerfile           |  11 +-
 integration-tests/docker/kafka.conf           |   1 -
 integration-tests/docker/middlemanager.conf   |   2 +-
 .../docker/router-custom-check-tls.conf       |   1 -
 .../docker/router-no-client-auth-tls.conf     |   1 -
 .../docker/router-permissive-tls.conf         |   1 -
 integration-tests/docker/supervisord.conf     |   1 +
 integration-tests/pom.xml                     |   5 +-
 integration-tests/run_cluster.sh              |  97 ----------------
 .../run_cluster_using_java_runtime_11.sh      |  31 +++++
 .../run_cluster_using_java_runtime_8.sh       |  31 +++++
 17 files changed, 372 insertions(+), 121 deletions(-)
 create mode 100755 integration-tests/common_run_cluster.sh
 create mode 100644 integration-tests/docker-base/README.md
 create mode 100644 integration-tests/docker-base/jdk11/Dockerfile
 rename integration-tests/docker-base/{ => jdk8}/Dockerfile (87%)
 delete mode 100755 integration-tests/run_cluster.sh
 create mode 100755 integration-tests/run_cluster_using_java_runtime_11.sh
 create mode 100755 integration-tests/run_cluster_using_java_runtime_8.sh

diff --git a/.travis.yml b/.travis.yml
index 4c2c3d2069e..5b4645650e2 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -268,13 +268,18 @@ jobs:
         " && false; }

+    # Integration tests Java Compile version is set by the machine environment jdk (set by the jdk key)
+    # Integration tests Java Runtime version is set by the JVM_RUNTIME env property (set the env key to -Djvm.runtime=<version>)
+    # (Currently integration tests only support running with jvm runtime 8 and 11)
+    # START - Integration tests for Compile with Java 8 and Run with Java 8
     - &integration_batch_index
-      name: "batch index integration test"
+      name: "(Compile=openjdk8, Run=openjdk8) batch index integration test"
+      jdk: openjdk8
       services: &integration_test_services
        - docker
-      env: TESTNG_GROUPS='-Dgroups=batch-index'
+      env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8'
       script: &run_integration_test
-        - ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${MAVEN_SKIP}
+        - ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${JVM_RUNTIME} ${MAVEN_SKIP}
       after_failure: &integration_test_diags
         - for v in ~/shared/logs/*.log ; do
           echo $v logtail ======================== ; tail -100 $v ;
@@ -285,40 +290,110 @@ jobs:
           done

     - &integration_perfect_rollup_parallel_batch_index
-      name: "perfect rollup parallel batch index integration test"
+      name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test"
+      jdk: openjdk8
       services: *integration_test_services
-      env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index'
+      env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8'
       script: *run_integration_test
       after_failure: *integration_test_diags

     - &integration_kafka_index
-      name: "kafka index integration test"
+ name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test" + jdk: openjdk8 services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=kafka-index' + env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8' script: *run_integration_test after_failure: *integration_test_diags - &integration_query - name: "query integration test" + name: "(Compile=openjdk8, Run=openjdk8) query integration test" + jdk: openjdk8 services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=query' + env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8' script: *run_integration_test after_failure: *integration_test_diags - &integration_realtime_index - name: "realtime index integration test" + name: "(Compile=openjdk8, Run=openjdk8) realtime index integration test" + jdk: openjdk8 services: *integration_test_services - env: TESTNG_GROUPS='-Dgroups=realtime-index' + env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8' script: *run_integration_test after_failure: *integration_test_diags - &integration_tests - name: "other integration test" + name: "(Compile=openjdk8, Run=openjdk8) other integration test" + jdk: openjdk8 services: *integration_test_services - env: TESTNG_GROUPS='-DexcludedGroups=batch-index,perfect-rollup-parallel-batch-index,kafka-index,query,realtime-index' + env: TESTNG_GROUPS='-DexcludedGroups=batch-index,perfect-rollup-parallel-batch-index,kafka-index,query,realtime-index' JVM_RUNTIME='-Djvm.runtime=8' script: *run_integration_test after_failure: *integration_test_diags + # END - Integration tests for Compile with Java 8 and Run with Java 8 + # START - Integration tests for Compile with Java 11 and Run with Java 11 + - <<: *integration_batch_index + name: "(Compile=openjdk11, Run=openjdk11) batch index integration test" + jdk: openjdk11 + env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=11' + + - <<: *integration_perfect_rollup_parallel_batch_index + name: "(Compile=openjdk11, Run=openjdk11) perfect rollup parallel batch index integration test" + jdk: openjdk11 + env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=11' + + - <<: *integration_kafka_index + name: "(Compile=openjdk11, Run=openjdk11) kafka index integration test" + jdk: openjdk11 + env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=11' + + - <<: *integration_query + name: "(Compile=openjdk11, Run=openjdk11) query integration test" + jdk: openjdk11 + env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' + + - <<: *integration_realtime_index + name: "(Compile=openjdk11, Run=openjdk11) realtime index integration test" + jdk: openjdk11 + env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=11' + + - <<: *integration_tests + name: "(Compile=openjdk11, Run=openjdk11) other integration test" + jdk: openjdk11 + env: TESTNG_GROUPS='-DexcludedGroups=batch-index,perfect-rollup-parallel-batch-index,kafka-index,query,realtime-index' JVM_RUNTIME='-Djvm.runtime=11' + # END - Integration tests for Compile with Java 11 and Run with Java 11 + + # START - Integration tests for Compile with Java 11 and Run with Java 8 + - <<: *integration_batch_index + name: "(Compile=openjdk11, Run=openjdk8) batch index integration test" + jdk: openjdk11 + env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' + + - <<: *integration_perfect_rollup_parallel_batch_index + name: "(Compile=openjdk11, Run=openjdk8) perfect rollup parallel batch index integration test" + 
+      env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8'
+
+    - <<: *integration_kafka_index
+      name: "(Compile=openjdk11, Run=openjdk8) kafka index integration test"
+      jdk: openjdk11
+      env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8'
+
+    - <<: *integration_query
+      name: "(Compile=openjdk11, Run=openjdk8) query integration test"
+      jdk: openjdk11
+      env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8'
+
+    - <<: *integration_realtime_index
+      name: "(Compile=openjdk11, Run=openjdk8) realtime index integration test"
+      jdk: openjdk11
+      env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8'
+
+    - <<: *integration_tests
+      name: "(Compile=openjdk11, Run=openjdk8) other integration test"
+      jdk: openjdk11
+      env: TESTNG_GROUPS='-DexcludedGroups=batch-index,perfect-rollup-parallel-batch-index,kafka-index,query,realtime-index' JVM_RUNTIME='-Djvm.runtime=8'
+    # END - Integration tests for Compile with Java 11 and Run with Java 8
+
     - name: "security vulnerabilities"
       stage: cron
       install: skip
diff --git a/integration-tests/common_run_cluster.sh b/integration-tests/common_run_cluster.sh
new file mode 100755
index 00000000000..3212d5b48af
--- /dev/null
+++ b/integration-tests/common_run_cluster.sh
@@ -0,0 +1,108 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Cleanup
+cleanup()
+{
+  for node in druid-historical druid-coordinator druid-overlord druid-router druid-router-permissive-tls druid-router-no-client-auth-tls druid-router-custom-check-tls druid-broker druid-middlemanager druid-zookeeper-kafka druid-metadata-storage;
+  do
+  docker stop $node
+  docker rm $node
+  done
+
+  docker network rm druid-it-net
+}
+
+# Druid environment and jars setup
+setup()
+{
+  # environment variables
+  DIR=$(cd $(dirname $0) && pwd)
+  DOCKERDIR=$DIR/docker
+  SHARED_DIR=${HOME}/shared
+  SUPERVISORDIR=/usr/lib/druid/conf
+  RESOURCEDIR=$DIR/src/test/resources
+
+  # so docker IP addr will be known during docker build
+  echo ${DOCKER_IP:=127.0.0.1} > $DOCKERDIR/docker_ip
+
+  # setup client keystore
+  ./docker/tls/generate-client-certs-and-keystores.sh
+  rm -rf docker/client_tls
+  cp -r client_tls docker/client_tls
+
+  # Make directories if they don't exist
+  mkdir -p $SHARED_DIR/logs
+  mkdir -p $SHARED_DIR/tasklogs
+
+  # install druid jars
+  rm -rf $SHARED_DIR/docker
+  cp -R docker $SHARED_DIR/docker
+  mvn -B dependency:copy-dependencies -DoutputDirectory=$SHARED_DIR/docker/lib
+
+  # install logging config
+  cp src/main/resources/log4j2.xml $SHARED_DIR/docker/lib/log4j2.xml
+
+  # copy the integration test jar, it provides test-only extension implementations
+  cp target/druid-integration-tests*.jar $SHARED_DIR/docker/lib
+
+  # one of the integration tests needs the wikiticker sample data
+  mkdir -p $SHARED_DIR/wikiticker-it
+  cp ../examples/quickstart/tutorial/wikiticker-2015-09-12-sampled.json.gz $SHARED_DIR/wikiticker-it/wikiticker-2015-09-12-sampled.json.gz
+  cp docker/wiki-simple-lookup.json $SHARED_DIR/wikiticker-it/wiki-simple-lookup.json
+}
+
+create_docker_network()
+{
+  docker network create --subnet=172.172.172.0/24 druid-it-net
+}
+
+# Start docker containers for all Druid processes and dependencies
+start_docker_containers()
+{
+  # Start zookeeper and kafka
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.2 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-zookeeper-kafka -p 2181:2181 -p 9092:9092 -p 9093:9093 -v $SHARED_DIR:/shared -v $DOCKERDIR/zookeeper.conf:$SUPERVISORDIR/zookeeper.conf -v $DOCKERDIR/kafka.conf:$SUPERVISORDIR/kafka.conf druid/cluster
+
+  # Start MYSQL
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.3 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-metadata-storage -v $SHARED_DIR:/shared -v $DOCKERDIR/metadata-storage.conf:$SUPERVISORDIR/metadata-storage.conf druid/cluster
+
+  # Start Overlord
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.4 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-overlord -p 8090:8090 -p 8290:8290 -v $SHARED_DIR:/shared -v $DOCKERDIR/overlord.conf:$SUPERVISORDIR/overlord.conf --link druid-metadata-storage:druid-metadata-storage --link druid-zookeeper-kafka:druid-zookeeper-kafka druid/cluster
+
+  # Start Coordinator
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.5 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-coordinator -p 8081:8081 -p 8281:8281 -v $SHARED_DIR:/shared -v $DOCKERDIR/coordinator.conf:$SUPERVISORDIR/coordinator.conf --link druid-overlord:druid-overlord --link druid-metadata-storage:druid-metadata-storage --link druid-zookeeper-kafka:druid-zookeeper-kafka druid/cluster
+
+  # Start Historical
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.6 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-historical -p 8083:8083 -p 8283:8283 -v $SHARED_DIR:/shared -v $DOCKERDIR/historical.conf:$SUPERVISORDIR/historical.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka druid/cluster
+
+  # Start Middlemanager
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.7 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-middlemanager -p 8091:8091 -p 8291:8291 -p 8100:8100 -p 8101:8101 -p 8102:8102 -p 8103:8103 -p 8104:8104 -p 8105:8105 -p 8300:8300 -p 8301:8301 -p 8302:8302 -p 8303:8303 -p 8304:8304 -p 8305:8305 -v $RESOURCEDIR:/resources -v $SHARED_DIR:/shared -v $DOCKERDIR/middlemanager.conf:$SUPERVISORDIR/middlemanager.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-overlord:druid-overlord druid/cluster
+
+  # Start Broker
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.8 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-broker -p 8082:8082 -p 8282:8282 -v $SHARED_DIR:/shared -v $DOCKERDIR/broker.conf:$SUPERVISORDIR/broker.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-middlemanager:druid-middlemanager --link druid-historical:druid-historical druid/cluster
+
+  # Start Router
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.9 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-router -p 8888:8888 -p 9088:9088 -v $SHARED_DIR:/shared -v $DOCKERDIR/router.conf:$SUPERVISORDIR/router.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-coordinator:druid-coordinator --link druid-broker:druid-broker druid/cluster
+
+  # Start Router with permissive TLS settings (client auth enabled, no hostname verification, no revocation check)
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.10 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-router-permissive-tls -p 8889:8889 -p 9089:9089 -v $SHARED_DIR:/shared -v $DOCKERDIR/router-permissive-tls.conf:$SUPERVISORDIR/router-permissive-tls.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-coordinator:druid-coordinator --link druid-broker:druid-broker druid/cluster
+
+  # Start Router with TLS but no client auth
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.11 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-router-no-client-auth-tls -p 8890:8890 -p 9090:9090 -v $SHARED_DIR:/shared -v $DOCKERDIR/router-no-client-auth-tls.conf:$SUPERVISORDIR/router-no-client-auth-tls.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-coordinator:druid-coordinator --link druid-broker:druid-broker druid/cluster
+
+  # Start Router with custom TLS cert checkers
+  docker run -d --privileged --net druid-it-net --ip 172.172.172.12 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --hostname druid-router-custom-check-tls --name druid-router-custom-check-tls -p 8891:8891 -p 9091:9091 -v $SHARED_DIR:/shared -v $DOCKERDIR/router-custom-check-tls.conf:$SUPERVISORDIR/router-custom-check-tls.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-coordinator:druid-coordinator --link druid-broker:druid-broker druid/cluster
+}
diff --git a/integration-tests/docker-base/README.md b/integration-tests/docker-base/README.md
new file mode 100644
index 00000000000..00923f99cf4
--- /dev/null
+++ b/integration-tests/docker-base/README.md
@@ -0,0 +1,31 @@
+
+
+## Base Docker Image for Integration Tests
+
+### Building Docker Image for Integration Tests
+
+Run the following commands from /integration-tests/docker-base
+
+To build the docker image for Java JDK8:
+- docker build -t druidbase:<tag> -f jdk8/Dockerfile .
+
+To build the docker image for Java JDK11:
+- docker build -t druidbase:<tag> -f jdk11/Dockerfile .
+
diff --git a/integration-tests/docker-base/jdk11/Dockerfile b/integration-tests/docker-base/jdk11/Dockerfile
new file mode 100644
index 00000000000..c3ea66d5396
--- /dev/null
+++ b/integration-tests/docker-base/jdk11/Dockerfile
@@ -0,0 +1,62 @@
+# Based on the following projects/files:
+# - SequenceIQ hadoop-docker project hosted at https://github.com/sequenceiq/hadoop-docker
+# - AdoptOpenJDK openjdk-docker project hosted at https://github.com/AdoptOpenJDK/openjdk-docker
+# and modified at the Apache Software Foundation (ASF).
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM ubuntu:16.04
+
+# Install Java JDK 11 (OpenJDK 11.0.5)
+# Sourced from AdoptOpenJDK openjdk-docker project (https://github.com/AdoptOpenJDK/openjdk-docker)
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends curl ca-certificates fontconfig locales \
+    && echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen \
+    && locale-gen en_US.UTF-8 \
+    && rm -rf /var/lib/apt/lists/*
+
+RUN set -eux; \
+    ARCH="$(dpkg --print-architecture)"; \
+    case "${ARCH}" in \
+       armhf) \
+         ESUM='c6b1fda3f8807028cbfcc34a4ded2e8a5a6b6239d2bcc1f06673ea6b1530df94'; \
+         BINARY_URL='https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.5%2B10/OpenJDK11U-jdk_arm_linux_hotspot_11.0.5_10.tar.gz'; \
+         ;; \
+       ppc64el|ppc64le) \
+         ESUM='d763481ddc29ac0bdefb24216b3a0bf9afbb058552682567a075f9c0f7da5814'; \
+         BINARY_URL='https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.5%2B10/OpenJDK11U-jdk_ppc64le_linux_hotspot_11.0.5_10.tar.gz'; \
+         ;; \
+       amd64|x86_64) \
+         ESUM='6dd0c9c8a740e6c19149e98034fba8e368fd9aa16ab417aa636854d40db1a161'; \
+         BINARY_URL='https://github.com/AdoptOpenJDK/openjdk11-binaries/releases/download/jdk-11.0.5%2B10/OpenJDK11U-jdk_x64_linux_hotspot_11.0.5_10.tar.gz'; \
+         ;; \
+       *) \
+         echo "Unsupported arch: ${ARCH}"; \
+         exit 1; \
+         ;; \
+    esac; \
+    curl -LfsSo /tmp/openjdk.tar.gz ${BINARY_URL}; \
+    echo "${ESUM} */tmp/openjdk.tar.gz" | sha256sum -c -; \
+    mkdir -p /opt/java/openjdk; \
+    cd /opt/java/openjdk; \
+    tar -xf /tmp/openjdk.tar.gz --strip-components=1; \
+    rm -rf /tmp/openjdk.tar.gz;
+
+ENV JAVA_HOME=/opt/java/openjdk \
+    PATH="/opt/java/openjdk/bin:$PATH"
+
+# Bundle everything into one script so cleanup can reduce image size.
+# Otherwise docker's layered images mean that things are not actually deleted.
+
+COPY setup.sh /root/setup.sh
+RUN chmod 0755 /root/setup.sh && /root/setup.sh
diff --git a/integration-tests/docker-base/Dockerfile b/integration-tests/docker-base/jdk8/Dockerfile
similarity index 87%
rename from integration-tests/docker-base/Dockerfile
rename to integration-tests/docker-base/jdk8/Dockerfile
index 7be192f3b2d..7784bf5924b 100644
--- a/integration-tests/docker-base/Dockerfile
+++ b/integration-tests/docker-base/jdk8/Dockerfile
@@ -16,8 +16,12 @@

 FROM ubuntu:16.04

+# Install Java JDK 8
+RUN apt-get update \
+    && apt-get install -y openjdk-8-jdk
+
 # Bundle everything into one script so cleanup can reduce image size.
 # Otherwise docker's layered images mean that things are not actually deleted.

 COPY setup.sh /root/setup.sh
-RUN chmod 0755 /root/setup.sh && /root/setup.sh
+RUN chmod 0755 /root/setup.sh && /root/setup.sh
\ No newline at end of file
diff --git a/integration-tests/docker-base/setup.sh b/integration-tests/docker-base/setup.sh
index f2be680903b..a6dc55283a9 100644
--- a/integration-tests/docker-base/setup.sh
+++ b/integration-tests/docker-base/setup.sh
@@ -24,9 +24,6 @@ apt-get update
 # wget
 apt-get install -y wget

-# Java
-apt-get install -y openjdk-8-jdk
-
 # MySQL (Metadata store)
 apt-get install -y mysql-server

diff --git a/integration-tests/docker/Dockerfile b/integration-tests/docker/Dockerfile
index 6c5094cf7d1..4886826ac4b 100644
--- a/integration-tests/docker/Dockerfile
+++ b/integration-tests/docker/Dockerfile
@@ -13,8 +13,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+
+# This is default value for base image in case DOCKER_IMAGE is not given when building
+ARG DOCKER_IMAGE=imply/druiditbase:openjdk-1.8.0_191-1
 # Base image is built from integration-tests/docker-base in the Druid repo
-FROM imply/druiditbase:0.2
+FROM $DOCKER_IMAGE
+
+# Verify Java version
+ARG DOCKER_IMAGE
+ENV DOCKER_IMAGE_USED=$DOCKER_IMAGE
+RUN echo "Built using base docker image DOCKER_IMAGE_USED=$DOCKER_IMAGE_USED"
+RUN java -version

 RUN echo "[mysqld]\ncharacter-set-server=utf8\ncollation-server=utf8_bin\n" >> /etc/mysql/my.cnf

diff --git a/integration-tests/docker/kafka.conf b/integration-tests/docker/kafka.conf
index 861aa36b32c..bccd89dabe7 100644
--- a/integration-tests/docker/kafka.conf
+++ b/integration-tests/docker/kafka.conf
@@ -1,5 +1,4 @@
 [program:kafka]
 command=/usr/local/kafka/bin/kafka-server-start.sh /usr/local/kafka/config/server.properties
-user=daemon
 priority=0
 stdout_logfile=/shared/logs/kafka.log
diff --git a/integration-tests/docker/middlemanager.conf b/integration-tests/docker/middlemanager.conf
index d32e6b130bf..7487edb4c47 100644
--- a/integration-tests/docker/middlemanager.conf
+++ b/integration-tests/docker/middlemanager.conf
@@ -12,7 +12,7 @@ command=java
   -Ddruid.worker.capacity=3
   -Ddruid.indexer.logs.directory=/shared/tasklogs
   -Ddruid.storage.storageDirectory=/shared/storage
-  -Ddruid.indexer.runner.javaOpts="-server -Xmx256m -Xms256m -XX:NewSize=128m -XX:MaxNewSize=128m -XX:+UseG1GC -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Duser.timezone=UTC -Dfile.encoding=UTF-8 -Dlog4j.configurationFile=/shared/docker/lib/log4j2.xml"
+  -Ddruid.indexer.runner.javaOpts="-server -Xmx256m -Xms256m -XX:NewSize=128m -XX:MaxNewSize=128m -XX:+UseG1GC -XX:+PrintGCDetails -Duser.timezone=UTC -Dfile.encoding=UTF-8 -Dlog4j.configurationFile=/shared/docker/lib/log4j2.xml"
   -Ddruid.indexer.fork.property.druid.processing.buffer.sizeBytes=25000000
   -Ddruid.indexer.fork.property.druid.processing.numThreads=1
   -Ddruid.indexer.fork.server.http.numThreads=20
diff --git a/integration-tests/docker/router-custom-check-tls.conf b/integration-tests/docker/router-custom-check-tls.conf
index c7862c6442d..e57ae82d01f 100644
--- a/integration-tests/docker/router-custom-check-tls.conf
+++ b/integration-tests/docker/router-custom-check-tls.conf
@@ -4,7 +4,6 @@ command=java
   -Xmx128m
   -XX:+UseConcMarkSweepGC
   -XX:+PrintGCDetails
-  -XX:+PrintGCTimeStamps
   -Duser.timezone=UTC
   -Dfile.encoding=UTF-8
   -Ddruid.host=%(ENV_HOST_IP)s
diff --git a/integration-tests/docker/router-no-client-auth-tls.conf b/integration-tests/docker/router-no-client-auth-tls.conf
index 6e0eaf6ae13..733002d32a3 100644
--- a/integration-tests/docker/router-no-client-auth-tls.conf
+++ b/integration-tests/docker/router-no-client-auth-tls.conf
@@ -4,7 +4,6 @@ command=java
   -Xmx128m
   -XX:+UseConcMarkSweepGC
   -XX:+PrintGCDetails
-  -XX:+PrintGCTimeStamps
   -Duser.timezone=UTC
   -Dfile.encoding=UTF-8
   -Ddruid.host=%(ENV_HOST_IP)s
diff --git a/integration-tests/docker/router-permissive-tls.conf b/integration-tests/docker/router-permissive-tls.conf
index 2d2fa6bea7a..c78e1ddc253 100644
--- a/integration-tests/docker/router-permissive-tls.conf
+++ b/integration-tests/docker/router-permissive-tls.conf
@@ -4,7 +4,6 @@ command=java
   -Xmx128m
   -XX:+UseConcMarkSweepGC
   -XX:+PrintGCDetails
-  -XX:+PrintGCTimeStamps
   -Duser.timezone=UTC
   -Dfile.encoding=UTF-8
   -Ddruid.host=%(ENV_HOST_IP)s
diff --git a/integration-tests/docker/supervisord.conf b/integration-tests/docker/supervisord.conf
index 99ab6b4b5cb..cbce4b6eb8a 100644
--- a/integration-tests/docker/supervisord.conf
+++ b/integration-tests/docker/supervisord.conf
@@ -1,5 +1,6 @@
 [supervisord]
 nodaemon=true
+logfile = /shared/logs/supervisord.log

 [include]
 files = /usr/lib/druid/conf/*.conf
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml
index 561c88b46d3..b76e83676b0 100644
--- a/integration-tests/pom.xml
+++ b/integration-tests/pom.xml
@@ -265,6 +265,9 @@
             <id>integration-tests</id>
+            <properties>
+                <jvm.runtime>8</jvm.runtime>
+            </properties>
@@ -278,7 +281,7 @@
                             <phase>pre-integration-test</phase>
-                                    <executable>${project.basedir}/run_cluster.sh</executable>
+                                    <executable>${project.basedir}/run_cluster_using_java_runtime_${jvm.runtime}.sh</executable>
diff --git a/integration-tests/run_cluster.sh b/integration-tests/run_cluster.sh
deleted file mode 100755
index 25078eb44c8..00000000000
--- a/integration-tests/run_cluster.sh
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# cleanup
-for node in druid-historical druid-coordinator druid-overlord druid-router druid-router-permissive-tls druid-router-no-client-auth-tls druid-router-custom-check-tls druid-broker druid-middlemanager druid-zookeeper-kafka druid-metadata-storage;
-do
-docker stop $node
-docker rm $node
-done
-
-docker network rm druid-it-net
-
-# environment variables
-DIR=$(cd $(dirname $0) && pwd)
-DOCKERDIR=$DIR/docker
-SHARED_DIR=${HOME}/shared
-SUPERVISORDIR=/usr/lib/druid/conf
-RESOURCEDIR=$DIR/src/test/resources
-
-# so docker IP addr will be known during docker build
-echo ${DOCKER_IP:=127.0.0.1} > $DOCKERDIR/docker_ip
-
-# setup client keystore
-./docker/tls/generate-client-certs-and-keystores.sh
-rm -rf docker/client_tls
-cp -r client_tls docker/client_tls
-
-# Make directories if they dont exist
-mkdir -p $SHARED_DIR/logs
-mkdir -p $SHARED_DIR/tasklogs
-
-# install druid jars
-rm -rf $SHARED_DIR/docker
-cp -R docker $SHARED_DIR/docker
-mvn -B dependency:copy-dependencies -DoutputDirectory=$SHARED_DIR/docker/lib
-
-# install logging config
-cp src/main/resources/log4j2.xml $SHARED_DIR/docker/lib/log4j2.xml
-
-# copy the integration test jar, it provides test-only extension implementations
-cp target/druid-integration-tests*.jar $SHARED_DIR/docker/lib
-
-# one of the integration tests needs the wikiticker sample data
-mkdir -p $SHARED_DIR/wikiticker-it
-cp ../examples/quickstart/tutorial/wikiticker-2015-09-12-sampled.json.gz $SHARED_DIR/wikiticker-it/wikiticker-2015-09-12-sampled.json.gz
-cp docker/wiki-simple-lookup.json $SHARED_DIR/wikiticker-it/wiki-simple-lookup.json
-
-docker network create --subnet=172.172.172.0/24 druid-it-net
-
-# Build Druid Cluster Image
-docker build -t druid/cluster $SHARED_DIR/docker
-
-# Start zookeeper and kafka
-docker run -d --privileged --net druid-it-net --ip 172.172.172.2 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-zookeeper-kafka -p 2181:2181 -p 9092:9092 -p 9093:9093 -v $SHARED_DIR:/shared -v $DOCKERDIR/zookeeper.conf:$SUPERVISORDIR/zookeeper.conf -v $DOCKERDIR/kafka.conf:$SUPERVISORDIR/kafka.conf druid/cluster
-
-# Start MYSQL
-docker run -d --privileged --net druid-it-net --ip 172.172.172.3 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-metadata-storage -v $SHARED_DIR:/shared -v $DOCKERDIR/metadata-storage.conf:$SUPERVISORDIR/metadata-storage.conf druid/cluster
-
-# Start Overlord
-docker run -d --privileged --net druid-it-net --ip 172.172.172.4 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-overlord -p 8090:8090 -p 8290:8290 -v $SHARED_DIR:/shared -v $DOCKERDIR/overlord.conf:$SUPERVISORDIR/overlord.conf --link druid-metadata-storage:druid-metadata-storage --link druid-zookeeper-kafka:druid-zookeeper-kafka druid/cluster
-
-# Start Coordinator
-docker run -d --privileged --net druid-it-net --ip 172.172.172.5 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-coordinator -p 8081:8081 -p 8281:8281 -v $SHARED_DIR:/shared -v $DOCKERDIR/coordinator.conf:$SUPERVISORDIR/coordinator.conf --link druid-overlord:druid-overlord --link druid-metadata-storage:druid-metadata-storage --link druid-zookeeper-kafka:druid-zookeeper-kafka druid/cluster
-
-# Start Historical
-docker run -d --privileged --net druid-it-net --ip 172.172.172.6 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-historical -p 8083:8083 -p 8283:8283 -v $SHARED_DIR:/shared -v $DOCKERDIR/historical.conf:$SUPERVISORDIR/historical.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka druid/cluster
-
-# Start Middlemanger
-docker run -d --privileged --net druid-it-net --ip 172.172.172.7 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-middlemanager -p 8091:8091 -p 8291:8291 -p 8100:8100 -p 8101:8101 -p 8102:8102 -p 8103:8103 -p 8104:8104 -p 8105:8105 -p 8300:8300 -p 8301:8301 -p 8302:8302 -p 8303:8303 -p 8304:8304 -p 8305:8305 -v $RESOURCEDIR:/resources -v $SHARED_DIR:/shared -v $DOCKERDIR/middlemanager.conf:$SUPERVISORDIR/middlemanager.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-overlord:druid-overlord druid/cluster
-
-# Start Broker
-docker run -d --privileged --net druid-it-net --ip 172.172.172.8 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-broker -p 8082:8082 -p 8282:8282 -v $SHARED_DIR:/shared -v $DOCKERDIR/broker.conf:$SUPERVISORDIR/broker.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-middlemanager:druid-middlemanager --link druid-historical:druid-historical druid/cluster
-
-# Start Router
-docker run -d --privileged --net druid-it-net --ip 172.172.172.9 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-router -p 8888:8888 -p 9088:9088 -v $SHARED_DIR:/shared -v $DOCKERDIR/router.conf:$SUPERVISORDIR/router.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-coordinator:druid-coordinator --link druid-broker:druid-broker druid/cluster
-
-# Start Router with permissive TLS settings (client auth enabled, no hostname verification, no revocation check)
-docker run -d --privileged --net druid-it-net --ip 172.172.172.10 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-router-permissive-tls -p 8889:8889 -p 9089:9089 -v $SHARED_DIR:/shared -v $DOCKERDIR/router-permissive-tls.conf:$SUPERVISORDIR/router-permissive-tls.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-coordinator:druid-coordinator --link druid-broker:druid-broker druid/cluster
-
-# Start Router with TLS but no client auth
-docker run -d --privileged --net druid-it-net --ip 172.172.172.11 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --name druid-router-no-client-auth-tls -p 8890:8890 -p 9090:9090 -v $SHARED_DIR:/shared -v $DOCKERDIR/router-no-client-auth-tls.conf:$SUPERVISORDIR/router-no-client-auth-tls.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-coordinator:druid-coordinator --link druid-broker:druid-broker druid/cluster
-
-# Start Router with custom TLS cert checkers
-docker run -d --privileged --net druid-it-net --ip 172.172.172.12 -e LANG=C.UTF-8 -e LANGUAGE=C.UTF-8 -e LC_ALL=C.UTF-8 --hostname druid-router-custom-check-tls --name druid-router-custom-check-tls -p 8891:8891 -p 9091:9091 -v $SHARED_DIR:/shared -v $DOCKERDIR/router-custom-check-tls.conf:$SUPERVISORDIR/router-custom-check-tls.conf --link druid-zookeeper-kafka:druid-zookeeper-kafka --link druid-coordinator:druid-coordinator --link druid-broker:druid-broker druid/cluster
diff --git a/integration-tests/run_cluster_using_java_runtime_11.sh b/integration-tests/run_cluster_using_java_runtime_11.sh
new file mode 100755
index 00000000000..b88990bcd89
--- /dev/null
+++ b/integration-tests/run_cluster_using_java_runtime_11.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Source the common script
+DIR="${BASH_SOURCE%/*}"
+if [[ ! -d "$DIR" ]]; then DIR="$PWD"; fi
+. "$DIR/common_run_cluster.sh"
+
+cleanup
+
+setup
+
+create_docker_network
+
+# Build Druid Cluster Image (Image running Java 11)
+docker build -t druid/cluster --build-arg DOCKER_IMAGE=imply/druiditbase:openjdk-11.0.5-1 $SHARED_DIR/docker
+
+start_docker_containers
diff --git a/integration-tests/run_cluster_using_java_runtime_8.sh b/integration-tests/run_cluster_using_java_runtime_8.sh
new file mode 100755
index 00000000000..b86e095c450
--- /dev/null
+++ b/integration-tests/run_cluster_using_java_runtime_8.sh
@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Source the common script
+DIR="${BASH_SOURCE%/*}"
+if [[ ! -d "$DIR" ]]; then DIR="$PWD"; fi
+. "$DIR/common_run_cluster.sh"
+
+cleanup
+
+setup
+
+create_docker_network
+
+# Build Druid Cluster Image (Image running Java 8)
+docker build -t druid/cluster --build-arg DOCKER_IMAGE=imply/druiditbase:openjdk-1.8.0_191-1 $SHARED_DIR/docker
+
+start_docker_containers
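
Usage sketch (not part of the patch) of how the pieces above fit together when run locally, assuming Docker and Maven are installed; the test group and the druidbase:<tag> value are placeholders, and building a local base image is optional because the run scripts default to the published imply/druiditbase images.

# Optional: build a Java 11 base image from integration-tests/docker-base (see README.md above)
docker build -t druidbase:<tag> -f jdk11/Dockerfile .

# Run one TestNG group against the Java 11 runtime; -Djvm.runtime=11 makes the
# integration-tests profile invoke run_cluster_using_java_runtime_11.sh
mvn verify -pl integration-tests -P integration-tests -Dgroups=kafka-index -Djvm.runtime=11

# Without -Djvm.runtime, the profile default of 8 selects run_cluster_using_java_runtime_8.sh
mvn verify -pl integration-tests -P integration-tests -Dgroups=kafka-index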