druid/.travis.yml

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
branches:
only:
- master
- /^\d+\.\d+\.\d+(-\S*)?$/ # release branches
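# e.g. branches named 0.23.0 or 24.0.1-rc1 would match the pattern above (illustrative names only)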
language: java
dist: xenial
jdk:
- openjdk8
cache:
directories:
- $HOME/.m2
env:
global:
- DOCKER_IP=127.0.0.1 # for integration tests
- MVN="mvn -B"
- > # Various options to make execution of maven goals faster (e.g., mvn install)
MAVEN_SKIP="-P skip-static-checks -Dweb.console.skip=true -Dmaven.javadoc.skip=true"
- MAVEN_SKIP_TESTS="-P skip-tests"
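# These variables are composed on job command lines; e.g. the install step below runs
# ${MVN} clean install -q -ff ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C, which expands to "mvn -B clean install ..."
# with static checks, the web console build, javadocs, and tests all skipped.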
addons:
apt:
packages:
- maven
- python3
# Add various options to make 'mvn install' fast and skip the JavaScript compile (-Dweb.console.skip=true) since it is
# not needed. Depending on network speeds, "mvn -q install" may exceed the default 10-minute no-output timeout before
# printing anything. To compensate, use travis_wait to extend the timeout.
install: ./check_test_suite.py && travis_terminate 0 || echo 'Running Maven install...' && MAVEN_OPTS='-Xmx3000m' travis_wait 15 ${MVN} clean install -q -ff -pl '!distribution,!:druid-it-image,!:druid-it-cases' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C && ${MVN} install -q -ff -pl 'distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
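# The install step above first runs check_test_suite.py, which decides whether this suite needs to run at all; if it
# succeeds, the job ends early via travis_terminate 0. Otherwise Maven installs every module except distribution and
# the revised-IT modules in parallel (-T1C), then installs distribution on its own.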
# There are 3 stages of tests
# 1. Tests - phase 1
# 2. Tests - phase 2
# 3. cron
#
# The cron type only runs jobs that are marked with stage cron. The cron stage also runs alongside
# phase 1 and phase 2 for commit builds on release branches.
# The test type is split into 2 phases so that more PRs can run their validation in parallel. The first phase is
# meant to contain fast sanity-check jobs; the second phase runs all other tests. The cron stage does not run on
# pull requests. Jobs with known flaky tests should be put in the second phase, since the second phase will not
# start if there are any failures in the first phase.
stages:
- name: Tests - phase 1
if: type != cron
- name: Tests - phase 2
if: type != cron
- name: cron
if: type = cron OR (type != pull_request AND branch != master)
jobs:
include:
- &test_processing_module
name: "(openjdk8) processing module test"
stage: Tests - phase 1
env:
- MAVEN_PROJECTS='processing'
before_script:
- export DRUID_USE_DEFAULT_VALUE_FOR_NULL=true
script:
- unset _JAVA_OPTIONS
# Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when
# resolving the SIGAR dependency.
- >
MAVEN_OPTS='-Xmx2048m' ${MVN} test -pl ${MAVEN_PROJECTS}
${MAVEN_SKIP} -Dremoteresources.skip=true -Ddruid.generic.useDefaultValueForNull=${DRUID_USE_DEFAULT_VALUE_FOR_NULL}
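# Surface any kernel OOM-killer activity so that test failures caused by memory pressure are visible in the job log
# (the trailing "|| exit 0" keeps this diagnostic step from failing the build).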
- sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
- free -m
- ${MVN} -pl ${MAVEN_PROJECTS} jacoco:report
# Add the merge target branch to determine the diff (see https://github.com/travis-ci/travis-ci/issues/6069).
# This is not needed for builds triggered by tags, since there will be no code diff.
- echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" # for debugging
- if [[ -z "${TRAVIS_TAG}" ]]; then git remote set-branches --add origin ${TRAVIS_BRANCH} && git fetch; fi
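# Travis clones with a restricted refspec, so 'git remote set-branches --add' plus 'git fetch' above is what makes
# origin/${TRAVIS_BRANCH} available locally for the diff commands below.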
# Determine the modified files that match the maven projects being tested. The maven project list either excludes
# projects (entries start with "!") or includes them (entries do not start with "!"), so both cases need to be handled.
# If the build is triggered by a tag, an error will be printed, but `all_files` will be correctly set to empty
# so that the coverage check is skipped.
- all_files="$(git diff --name-only origin/${TRAVIS_BRANCH}...HEAD | grep "\.java$" || [[ $? == 1 ]])"
- for f in ${all_files}; do echo $f; done # for debugging
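# Worked example of the transformation below: MAVEN_PROJECTS='indexing-hadoop,indexing-service' becomes the grep
# pattern "^indexing-hadoop\|^indexing-service" (include case), while MAVEN_PROJECTS='!processing,!server' becomes
# "^processing\|^server" and is applied with grep -v (exclude case).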
- >
if [[ "${MAVEN_PROJECTS}" = \!* ]]; then
regex="${MAVEN_PROJECTS:1}";
regex="^${regex//,\!/\\|^}";
project_files="$(echo "${all_files}" | grep -v "${regex}" || [[ $? == 1 ]])";
else
regex="^${MAVEN_PROJECTS//,/\\|^}";
project_files="$(echo "${all_files}" | grep "${regex}" || [[ $? == 1 ]])";
fi
- for f in ${project_files}; do echo $f; done # for debugging
# Check the diff code coverage for the maven projects being tested (the npm install is retried in case of network errors).
# Currently, the function coverage check is not reliable, so it is disabled.
- >
if [ -n "${project_files}" ]; then
travis_retry npm install @connectis/diff-test-coverage@1.5.3
&& git diff origin/${TRAVIS_BRANCH}...HEAD -- ${project_files}
| node_modules/.bin/diff-test-coverage
--coverage "**/target/site/jacoco/jacoco.xml"
--type jacoco
--line-coverage 50
--branch-coverage 50
--function-coverage 0
--log-template "coverage-lines-complete"
--log-template "coverage-files-complete"
--log-template "totals-complete"
--log-template "errors"
--
|| { printf "\n\n****FAILED****\nDiff code coverage check failed. To view coverage report, run 'mvn clean test jacoco:report' and open 'target/site/jacoco/index.html'\nFor more details on how to run code coverage locally, follow instructions here - https://github.com/apache/druid/blob/master/dev/code-review/code-coverage.md#running-code-coverage-locally\n\n" && false; }
fi
after_success:
# retry in case of network error
- travis_retry curl -o codecov.sh -s https://codecov.io/bash
- travis_retry bash codecov.sh -X gcov
- <<: *test_processing_module
name: "(openjdk11) processing module test"
stage: Tests - phase 2
jdk: openjdk11
- <<: *test_processing_module
name: "(openjdk17) processing module test"
stage: Tests - phase 2
jdk: openjdk17
- &test_processing_module_sqlcompat
<<: *test_processing_module
name: "(openjdk8) processing module test (SQL Compatibility)"
stage: Tests - phase 1
before_script: &setup_sqlcompat
- export DRUID_USE_DEFAULT_VALUE_FOR_NULL=false
- <<: *test_processing_module_sqlcompat
name: "(openjdk11) processing module test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk11
- <<: *test_processing_module_sqlcompat
name: "(openjdk17) processing module test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk17
- &test_indexing_module
<<: *test_processing_module
name: "(openjdk8) indexing modules test"
env:
- MAVEN_PROJECTS='indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service'
- <<: *test_indexing_module
name: "(openjdk11) indexing modules test"
stage: Tests - phase 2
jdk: openjdk11
- <<: *test_indexing_module
name: "(openjdk17) indexing modules test"
stage: Tests - phase 2
jdk: openjdk17
- &test_indexing_module_sqlcompat
<<: *test_indexing_module
name: "(openjdk8) indexing modules test (SQL Compatibility)"
stage: Tests - phase 1
before_script: *setup_sqlcompat
- <<: *test_indexing_module_sqlcompat
name: "(openjdk11) indexing modules test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk11
- <<: *test_indexing_module_sqlcompat
name: "(openjdk17) indexing modules test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk17
- &test_server_module
<<: *test_processing_module
name: "(openjdk8) server module test"
env:
- MAVEN_PROJECTS='server'
- <<: *test_server_module
name: "(openjdk11) server module test"
stage: Tests - phase 2
jdk: openjdk11
- <<: *test_server_module
name: "(openjdk17) server module test"
stage: Tests - phase 2
jdk: openjdk17
- &test_server_module_sqlcompat
<<: *test_server_module
name: "(openjdk8) server module test (SQL Compatibility)"
before_script: *setup_sqlcompat
- <<: *test_server_module_sqlcompat
name: "(openjdk11) server module test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk11
- <<: *test_server_module_sqlcompat
name: "(openjdk17) server module test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk17
- &test_other_modules
<<: *test_processing_module
name: "(openjdk8) other modules test"
env:
- MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests,!:druid-it-tools,!:druid-it-image,!:druid-it-cases'
- <<: *test_other_modules
name: "(openjdk11) other modules test"
stage: Tests - phase 2
jdk: openjdk11
- <<: *test_other_modules
name: "(openjdk17) other modules test"
stage: Tests - phase 2
jdk: openjdk17
- &test_other_modules_sqlcompat
<<: *test_other_modules
name: "(openjdk8) other modules test (SQL Compatibility)"
before_script: *setup_sqlcompat
- <<: *test_other_modules_sqlcompat
name: "(openjdk11) other modules test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk11
- <<: *test_other_modules_sqlcompat
name: "(openjdk17) other modules test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk17
- name: "Build and test on ARM64 CPU architecture (1)"
stage: Tests - phase 2
arch: arm64-graviton2
dist: focal
virt: vm
group: edge
jdk: openjdk11
env:
- MAVEN_PROJECTS='core,indexing-hadoop,indexing-service,processing'
script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Dweb.console.skip=true -DargLine=-Xmx3000m
- name: "Build and test on ARM64 CPU architecture (2)"
stage: Tests - phase 2
arch: arm64-graviton2
dist: focal
virt: vm
group: edge
jdk: openjdk11
env:
- MAVEN_PROJECTS='core,sql,server,services'
script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Dweb.console.skip=true -DargLine=-Xmx3000m
# The Java compile version for integration tests is set by the machine environment JDK (the jdk key).
# The Java runtime version for integration tests is set by the JVM_RUNTIME env property (set the env key to -Djvm.runtime=<JVM_RUNTIME_VERSION>).
# Integration tests run with either MiddleManagers or Indexers.
# (Currently, integration tests only support running with JVM runtimes 8 and 11.)
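# For example, in the first job below the machine JDK (jdk: openjdk8) compiles the code, JVM_RUNTIME='-Djvm.runtime=8'
# selects the cluster's runtime Java version, TESTNG_GROUPS='-Dgroups=batch-index' picks the TestNG group to run, and
# USE_INDEXER='middleManager' chooses MiddleManagers over Indexers.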
# START - Integration tests for Compile with Java 8 and Run with Java 8
- &integration_batch_index
name: "(Compile=openjdk8, Run=openjdk8) batch index integration test"
stage: Tests - phase 2
jdk: openjdk8
services: &integration_test_services
- docker
env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: &run_integration_test
- ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${JVM_RUNTIME} -Dit.indexer=${USE_INDEXER} ${MAVEN_SKIP} -Doverride.config.path=${OVERRIDE_CONFIG_PATH}
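# OVERRIDE_CONFIG_PATH is only set by some of the jobs below; when it is unset, -Doverride.config.path expands to an
# empty value.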
after_failure: &integration_test_diags
- for v in ~/shared/logs/*.log ; do
echo $v logtail ======================== ; tail -100 $v ;
done
- for v in broker middlemanager overlord router coordinator historical ; do
echo $v dmesg ======================== ;
docker exec -it druid-$v sh -c 'dmesg | tail -3' ;
done
- &integration_input_format
name: "(Compile=openjdk8, Run=openjdk8) input format integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_input_format
name: "(Compile=openjdk8, Run=openjdk8) input format integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_input_source
name: "(Compile=openjdk8, Run=openjdk8) input source integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_input_source
name: "(Compile=openjdk8, Run=openjdk8) input source integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_perfect_rollup_parallel_batch_index
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test"
jdk: openjdk8
stage: Tests - phase 2
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_perfect_rollup_parallel_batch_index
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- <<: *integration_perfect_rollup_parallel_batch_index
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store"
env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store'
- <<: *integration_perfect_rollup_parallel_batch_index
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store with indexer"
env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store'
- &integration_kafka_index
name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_kafka_index
name: "(Compile=openjdk8, Run=openjdk8) kafka index, transactional kafka index integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=kafka-index,kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- <<: *integration_kafka_index
name: "(Compile=openjdk8, Run=openjdk8) custom coordinator duties integration test"
env: TESTNG_GROUPS='-Dgroups=custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/custom-coordinator-duties'
- &integration_kafka_index_slow
name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_kafka_index_slow
name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow with Indexer"
env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_kafka_transactional_index
name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_kafka_transactional_index_slow
name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_kafka_transactional_index_slow
name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow with Indexer"
env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_kafka_format_tests
name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_kafka_format_tests
name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats with Indexer"
env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_query
name: "(Compile=openjdk8, Run=openjdk8) query integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_query_retry
name: "(Compile=openjdk8, Run=openjdk8) query retry integration test for missing segments"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_query_error
name: "(Compile=openjdk8, Run=openjdk8) query error integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_security
name: "(Compile=openjdk8, Run=openjdk8) security integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_ldap_security
name: "(Compile=openjdk8, Run=openjdk8) ldap security integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_realtime_index
name: "(Compile=openjdk8, Run=openjdk8) realtime index integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_append_ingestion
name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_append_ingestion
name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_compaction_tests
name: "(Compile=openjdk8, Run=openjdk8) compaction integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_compaction_tests
name: "(Compile=openjdk8, Run=openjdk8) compaction integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_tests
name: "(Compile=openjdk8, Run=openjdk8) other integration tests"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk8) other integration tests with Indexer"
env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
#- <<: *integration_tests
# name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests"
# jdk: openjdk8
# env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_query
name: "(Compile=openjdk8, Run=openjdk8) query integration test (mariaDB)"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=query' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
# Revised ITs.
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests (new)"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
# Uses the installation defined above, then builds the test tools and Docker image and runs one IT. If tests fail,
# echoes the log lines of any Druid services that did not exit normally.
script: ./it.sh travis HighAvailability
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) multi stage query tests"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: ./it.sh travis MultiStageQuery
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) catalog integration tests"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: ./it.sh travis Catalog
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer (new)"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
script: ./it.sh travis BatchIndex
# END - Integration tests for Compile with Java 8 and Run with Java 8
# START - Integration tests for Compile with Java 8 and Run with Java 11
- <<: *integration_batch_index
name: "(Compile=openjdk8, Run=openjdk11) batch index integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_input_format
name: "(Compile=openjdk8, Run=openjdk11) input format integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_input_source
name: "(Compile=openjdk8, Run=openjdk11) input source integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_perfect_rollup_parallel_batch_index
name: "(Compile=openjdk8, Run=openjdk11) perfect rollup parallel batch index integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_query
name: "(Compile=openjdk8, Run=openjdk11) query integration test"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_query_retry
name: "(Compile=openjdk8, Run=openjdk11) query retry integration test for missing segments"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_query_error
name: "(Compile=openjdk8, Run=openjdk11) query error integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_security
name: "(Compile=openjdk8, Run=openjdk11) security integration test"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_ldap_security
name: "(Compile=openjdk8, Run=openjdk11) ldap security integration test"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_realtime_index
name: "(Compile=openjdk8, Run=openjdk11) realtime index integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_append_ingestion
name: "(Compile=openjdk8, Run=openjdk11) append ingestion integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_compaction_tests
name: "(Compile=openjdk8, Run=openjdk11) compaction integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk11) other integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk11) leadership and high availability integration tests"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_query
name: "(Compile=openjdk8, Run=openjdk11) query integration test (mariaDB)"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
# END - Integration tests for Compile with Java 8 and Run with Java 11
- &integration_batch_index_k8s
name: "(Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) ITNestedQueryPushDownTest integration test"
stage: Tests - phase 2
jdk: openjdk8
services: &integration_test_services_k8s
- docker
env: CONFIG_FILE='k8s_run_config_file.json' IT_TEST='-Dit.test=ITNestedQueryPushDownTest' POD_NAME=int-test POD_NAMESPACE=default BUILD_DRUID_CLUSTER=true
script: &run_integration_test_k8s
- ${MVN} verify -pl integration-tests -P int-tests-config-file ${IT_TEST} ${MAVEN_SKIP} -Dpod.name=${POD_NAME} -Dpod.namespace=${POD_NAMESPACE} -Dbuild.druid.cluster=${BUILD_DRUID_CLUSTER}
after_failure: &integration_test_diags_k8s
- for v in broker middlemanager router coordinator historical ; do
echo "------------------------druid-tiny-cluster-"$v"s-0-------------------------";
sudo /usr/local/bin/kubectl logs --tail 1000 druid-tiny-cluster-"$v"s-0;
done
- &security_vulnerabilities
name: "security vulnerabilities"
stage: cron
install: skip
script: |-
${MVN} dependency-check:purge dependency-check:check ${HADOOP_PROFILE} || { echo "
The OWASP dependency check has found security vulnerabilities. Please use a newer version
of the dependency that does not have vulnerabilities. To see a report run
'mvn dependency-check:check'
If the analysis has false positives,
they can be suppressed by adding entries to owasp-dependency-check-suppressions.xml (for more
information, see https://jeremylong.github.io/DependencyCheck/general/suppression.html).
" && false; }
- <<: *security_vulnerabilities
name: "security vulnerabilities with Hadoop3"
env:
- HADOOP_PROFILE='-Phadoop3'
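# A suppression entry in owasp-dependency-check-suppressions.xml looks roughly like the following (an illustrative
# sketch based on the DependencyCheck suppression docs linked above; the packageUrl and CVE below are placeholders):
#   <suppress>
#     <notes>False positive: explain why the CVE does not apply.</notes>
#     <packageUrl regex="true">^pkg:maven/org\.example/example\-artifact@.*$</packageUrl>
#     <cve>CVE-0000-00000</cve>
#   </suppress>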
# Travis CI only supports per-build (and not per-job) notifications: https://github.com/travis-ci/travis-ci/issues/9888
notifications:
email:
if: type = cron
recipients:
# This is the string "dev@druid.apache.org" encrypted against the apache/druid repo so that forks are unable to
# use this notification:
# https://github.com/travis-ci/travis-ci/issues/1094#issuecomment-215019909
# https://github.com/travis-ci/travis-ci/issues/2711
- secure: "MupjX/0jLwh3XzHPl74BTk2/Kp5r+8TrEewfRhpQdWKFMBXLKNqu0k2VXf5C/NIg3uvPianq3REk+qeTHI8dL2ShjiWS/eIRkJOHLfObdNNBuos5fo4TxAuBQcXyT4VjAq5jnAkH84Pxf2Nl0rkisWoIhvwSX7+kNrjW1qdu7K0="
on_success: change
on_failure: always