remove Travis CI (#13789)

Tejaswini Bandlamudi 2023-02-10 15:16:56 +05:30 committed by GitHub
parent d7b95988d7
commit 752964390e
8 changed files with 3 additions and 866 deletions

.travis.yml

@@ -1,679 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
branches:
only:
- master
- /^\d+\.\d+\.\d+(-\S*)?$/ # release branches
language: java
dist: xenial
jdk:
- openjdk8
cache:
directories:
- $HOME/.m2
env:
global:
- DOCKER_IP=127.0.0.1 # for integration tests
- MVN="mvn -B"
- > # Various options to make execution of maven goals faster (e.g., mvn install)
MAVEN_SKIP="-P skip-static-checks -Dweb.console.skip=true -Dmaven.javadoc.skip=true"
- MAVEN_SKIP_TESTS="-P skip-tests"
addons:
apt:
packages:
- maven
- python3
# Add various options to make 'mvn install' fast and skip javascript compile (-Dweb.console.skip=true) since it is not
# needed. Depending on network speeds, "mvn -q install" may take longer than the default 10 minute timeout to print any
# output. To compensate, use travis_wait to extend the timeout.
install: ./check_test_suite.py && travis_terminate 0 || echo 'Running Maven install...' && MAVEN_OPTS='-Xmx3000m' travis_wait 15 ${MVN} clean install -q -ff -pl '!distribution,!:druid-it-image,!:druid-it-cases' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C && ${MVN} install -q -ff -pl 'distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
# There are 3 stages of tests
# 1. Tests - phase 1 (completely migrated to github workflows)
# 2. Tests - phase 2
# 3. cron
#
# The cron type only runs jobs that are marked with stage cron. The cron stage also runs alongside
# phase 1 and phase 2 for commit builds on release branches.
# The test type is split into 2 stages. This is done so that more PRs can run their validation
# in parallel. The first phase is meant to include sanity test jobs. The jobs in this phase are
# meant to be fast. The second phase is meant to run all other tests. Cron stage does not run on pull requests.
# Jobs with known flaky tests should be put in the second phase since the second phase will not
# start if there are any failures in the first phase.
stages:
- name: Tests - phase 1
if: type != cron
- name: Tests - phase 2
if: type != cron
- name: cron
if: type = cron OR (type != pull_request AND branch != master)
jobs:
include:
- &test_processing_module
name: "(openjdk11) processing module test"
stage: Tests - phase 2
jdk: openjdk11
env:
- MAVEN_PROJECTS='processing'
before_script:
- export DRUID_USE_DEFAULT_VALUE_FOR_NULL=true
script:
- unset _JAVA_OPTIONS
# Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when
# resolving the SIGAR dependency.
- >
MAVEN_OPTS='-Xmx2048m' ${MVN} test -pl ${MAVEN_PROJECTS}
${MAVEN_SKIP} -Dremoteresources.skip=true -Ddruid.generic.useDefaultValueForNull=${DRUID_USE_DEFAULT_VALUE_FOR_NULL}
- sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
- free -m
- travis_wait 15 ${MVN} -pl ${MAVEN_PROJECTS} jacoco:report
# Add merge target branch to determine diff (see https://github.com/travis-ci/travis-ci/issues/6069).
# This is not needed for build triggered by tags, since there will be no code diff.
- echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" # for debugging
- if [[ -z "${TRAVIS_TAG}" ]]; then git remote set-branches --add origin ${TRAVIS_BRANCH} && git fetch; fi
# Determine the modified files that match the maven projects being tested. We use maven project lists that
# either exclude (starts with "!") or include (does not start with "!"), so both cases need to be handled.
# If the build is triggered by a tag, an error will be printed, but `all_files` will be correctly set to empty
# so that the coverage check is skipped.
- all_files="$(git diff --name-only origin/${TRAVIS_BRANCH}...HEAD | grep "\.java$" || [[ $? == 1 ]])"
- for f in ${all_files}; do echo $f; done # for debugging
- >
if [[ "${MAVEN_PROJECTS}" = \!* ]]; then
regex="${MAVEN_PROJECTS:1}";
regex="^${regex//,\!/\\|^}";
project_files="$(echo "${all_files}" | grep -v "${regex}" || [[ $? == 1 ]])";
else
regex="^${MAVEN_PROJECTS//,/\\|^}";
project_files="$(echo "${all_files}" | grep "${regex}" || [[ $? == 1 ]])";
fi
- for f in ${project_files}; do echo $f; done # for debugging
# Check diff code coverage for the maven projects being tested (retry install in case of network error).
# Currently, the function coverage check is not reliable, so it is disabled.
- >
if [ -n "${project_files}" ]; then
travis_retry npm install @connectis/diff-test-coverage@1.5.3
&& git diff origin/${TRAVIS_BRANCH}...HEAD -- ${project_files}
| node_modules/.bin/diff-test-coverage
--coverage "**/target/site/jacoco/jacoco.xml"
--type jacoco
--line-coverage 50
--branch-coverage 50
--function-coverage 0
--log-template "coverage-lines-complete"
--log-template "coverage-files-complete"
--log-template "totals-complete"
--log-template "errors"
--
|| { printf "\n\n****FAILED****\nDiff code coverage check failed. To view coverage report, run 'mvn clean test jacoco:report' and open 'target/site/jacoco/index.html'\nFor more details on how to run code coverage locally, follow instructions here - https://github.com/apache/druid/blob/master/dev/code-review/code-coverage.md#running-code-coverage-locally\n\n" && false; }
fi
after_success:
# retry in case of network error
- travis_retry curl -o codecov.sh -s https://codecov.io/bash
- travis_retry bash codecov.sh -X gcov
- <<: *test_processing_module
name: "(openjdk17) processing module test"
stage: Tests - phase 2
jdk: openjdk17
- &test_processing_module_sqlcompat
<<: *test_processing_module
name: "(openjdk11) processing module test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk11
before_script: &setup_sqlcompat
- export DRUID_USE_DEFAULT_VALUE_FOR_NULL=false
- <<: *test_processing_module_sqlcompat
name: "(openjdk17) processing module test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk17
- &test_indexing_module
<<: *test_processing_module
name: "(openjdk11) indexing modules test"
stage: Tests - phase 2
jdk: openjdk11
env:
- MAVEN_PROJECTS='indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service'
- <<: *test_indexing_module
name: "(openjdk17) indexing modules test"
stage: Tests - phase 2
jdk: openjdk17
- &test_indexing_module_sqlcompat
<<: *test_indexing_module
name: "(openjdk11) indexing modules test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk11
before_script: *setup_sqlcompat
- <<: *test_indexing_module_sqlcompat
name: "(openjdk17) indexing modules test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk17
- &test_server_module
<<: *test_processing_module
name: "(openjdk11) server module test"
stage: Tests - phase 2
jdk: openjdk11
env:
- MAVEN_PROJECTS='server'
- <<: *test_server_module
name: "(openjdk17) server module test"
stage: Tests - phase 2
jdk: openjdk17
- &test_server_module_sqlcompat
<<: *test_server_module
name: "(openjdk11) server module test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk11
before_script: *setup_sqlcompat
- <<: *test_server_module_sqlcompat
name: "(openjdk17) server module test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk17
- &test_other_modules
<<: *test_processing_module
name: "(openjdk11) other modules test"
stage: Tests - phase 2
jdk: openjdk11
env:
- MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests,!:druid-it-tools,!:druid-it-image,!:druid-it-cases'
- <<: *test_other_modules
name: "(openjdk17) other modules test"
stage: Tests - phase 2
jdk: openjdk17
- &test_other_modules_sqlcompat
<<: *test_other_modules
name: "(openjdk11) other modules test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk11
before_script: *setup_sqlcompat
- <<: *test_other_modules_sqlcompat
name: "(openjdk17) other modules test (SQL Compatibility)"
stage: Tests - phase 2
jdk: openjdk17
- name: "Build and test on ARM64 CPU architecture (1)"
stage: Tests - phase 2
arch: arm64-graviton2
dist: focal
virt: vm
group: edge
jdk: openjdk11
env:
- MAVEN_PROJECTS='core,indexing-hadoop,indexing-service,processing'
script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Dweb.console.skip=true -DargLine=-Xmx3000m
- name: "Build and test on ARM64 CPU architecture (2)"
stage: Tests - phase 2
arch: arm64-graviton2
dist: focal
virt: vm
group: edge
jdk: openjdk11
env:
- MAVEN_PROJECTS='core,sql,server,services'
script: ${MVN} test -B -pl ${MAVEN_PROJECTS} -Dweb.console.skip=true -DargLine=-Xmx3000m
# Integration tests Java Compile version is set by the machine environment jdk (set by the jdk key)
# Integration tests Java Runtime version is set by the JVM_RUNTIME env property (set env key to -Djvm.runtime=<JVM_RUNTIME_VERSION>)
# Integration tests will either use MiddleManagers or Indexers
# (Currently integration tests only support running with jvm runtime 8 and 11)
# START - Integration tests for Compile with Java 8 and Run with Java 8
- &integration_batch_index
name: "(Compile=openjdk8, Run=openjdk8) batch index integration test"
stage: Tests - phase 2
jdk: openjdk8
services: &integration_test_services
- docker
env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: &run_integration_test
- ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${JVM_RUNTIME} -Dit.indexer=${USE_INDEXER} ${MAVEN_SKIP} -Doverride.config.path=${OVERRIDE_CONFIG_PATH}
after_failure: &integration_test_diags
- for v in ~/shared/logs/*.log ; do
echo $v logtail ======================== ; tail -100 $v ;
done
- for v in broker middlemanager overlord router coordinator historical ; do
echo $v dmesg ======================== ;
docker exec -it druid-$v sh -c 'dmesg | tail -3' ;
done
- &integration_input_format
name: "(Compile=openjdk8, Run=openjdk8) input format integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_input_source
name: "(Compile=openjdk8, Run=openjdk8) input source integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_perfect_rollup_parallel_batch_index
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test"
jdk: openjdk8
stage: Tests - phase 2
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_perfect_rollup_parallel_batch_index
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- <<: *integration_perfect_rollup_parallel_batch_index
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store"
env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store'
- <<: *integration_perfect_rollup_parallel_batch_index
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with deep storage as intermediate store with indexer"
env: TESTNG_GROUPS='-Dgroups=shuffle-deep-store' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/shuffle-deep-store'
- &integration_kafka_index
name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_kafka_index
name: "(Compile=openjdk8, Run=openjdk8) kafka index, transactional kafka index integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=kafka-index,kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- <<: *integration_kafka_index
name: "(Compile=openjdk8, Run=openjdk8) custom coordinator duties integration test"
env: TESTNG_GROUPS='-Dgroups=custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/custom-coordinator-duties'
- &integration_kafka_index_slow
name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_kafka_index_slow
name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow with Indexer"
env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_kafka_transactional_index
name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_kafka_transactional_index_slow
name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_kafka_transactional_index_slow
name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow with Indexer"
env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_kafka_format_tests
name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_kafka_format_tests
name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats with Indexer"
env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_query
name: "(Compile=openjdk8, Run=openjdk8) query integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_query_retry
name: "(Compile=openjdk8, Run=openjdk8) query retry integration test for missing segments"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_query_error
name: "(Compile=openjdk8, Run=openjdk8) query error integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_security
name: "(Compile=openjdk8, Run=openjdk8) security integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_ldap_security
name: "(Compile=openjdk8, Run=openjdk8) ldap security integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_realtime_index
name: "(Compile=openjdk8, Run=openjdk8) realtime index integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- &integration_append_ingestion
name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_append_ingestion
name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_compaction_tests
name: "(Compile=openjdk8, Run=openjdk8) compaction integration test"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_compaction_tests
name: "(Compile=openjdk8, Run=openjdk8) compaction integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_tests
name: "(Compile=openjdk8, Run=openjdk8) other integration tests"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk8) other integration tests with Indexer"
env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
#- <<: *integration_tests
# name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests"
# jdk: openjdk8
# env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_query
name: "(Compile=openjdk8, Run=openjdk8) query integration test (mariaDB)"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=query' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
# Revised ITs.
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability integration tests (new)"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
# Uses the installation defined above. Then, builds the test tools and docker image,
# and runs one IT. If tests fail, echoes log lines of any of
# the Druid services that did not exit normally.
script: ./it.sh travis HighAvailability
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) multi stage query tests"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: ./it.sh travis MultiStageQuery
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) multi stage query tests with MM"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: ./it.sh travis MultiStageQueryWithMM
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) catalog integration tests"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
script: ./it.sh travis Catalog
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer (new)"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
script: ./it.sh travis BatchIndex
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) input source integration test with Indexer (new)"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
script: ./it.sh travis InputSource
- &integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) input format integration test with Indexer (new)"
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
script: ./it.sh travis InputFormat
# END - Integration tests for Compile with Java 8 and Run with Java 8
# START - Integration tests for Compile with Java 8 and Run with Java 11
- <<: *integration_batch_index
name: "(Compile=openjdk8, Run=openjdk11) batch index integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_input_format
name: "(Compile=openjdk8, Run=openjdk11) input format integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_input_source
name: "(Compile=openjdk8, Run=openjdk11) input source integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_perfect_rollup_parallel_batch_index
name: "(Compile=openjdk8, Run=openjdk11) perfect rollup parallel batch index integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_query
name: "(Compile=openjdk8, Run=openjdk11) query integration test"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_query_retry
name: "(Compile=openjdk8, Run=openjdk11) query retry integration test for missing segments"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_query_error
name: "(Compile=openjdk8, Run=openjdk11) query error integration test for missing segments"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=query-error' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_security
name: "(Compile=openjdk8, Run=openjdk11) security integration test"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_ldap_security
name: "(Compile=openjdk8, Run=openjdk11) ldap security integration test"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=ldap-security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_realtime_index
name: "(Compile=openjdk8, Run=openjdk11) realtime index integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_append_ingestion
name: "(Compile=openjdk8, Run=openjdk11) append ingestion integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_compaction_tests
name: "(Compile=openjdk8, Run=openjdk11) compaction integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk11) other integration test"
jdk: openjdk8
stage: cron
env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,query-error,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction,high-availability,upgrade,shuffle-deep-store,custom-coordinator-duties' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk11) leadership and high availability integration tests"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=high-availability' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
- <<: *integration_query
name: "(Compile=openjdk8, Run=openjdk11) query integration test (mariaDB)"
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
# END - Integration tests for Compile with Java 8 and Run with Java 11
- &integration_batch_index_k8s
name: "(Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) ITNestedQueryPushDownTest integration test"
stage: Tests - phase 2
jdk: openjdk8
services: &integration_test_services_k8s
- docker
env: CONFIG_FILE='k8s_run_config_file.json' IT_TEST='-Dit.test=ITNestedQueryPushDownTest' POD_NAME=int-test POD_NAMESPACE=default BUILD_DRUID_CLSUTER=true
script: &run_integration_test_k8s
- ${MVN} verify -pl integration-tests -P int-tests-config-file ${IT_TEST} ${MAVEN_SKIP} -Dpod.name=${POD_NAME} -Dpod.namespace=${POD_NAMESPACE} -Dbuild.druid.cluster=${BUILD_DRUID_CLSUTER}
after_failure: &integration_test_diags_k8s
- for v in broker middlemanager router coordinator historical ; do
echo "------------------------druid-tiny-cluster-"$v"s-0-------------------------";
sudo /usr/local/bin/kubectl logs --tail 1000 druid-tiny-cluster-"$v"s-0;
done
- &security_vulnerabilities
name: "security vulnerabilities"
stage: cron
install: skip
script: |-
${MVN} dependency-check:purge dependency-check:check ${HADOOP_PROFILE} || { echo "
The OWASP dependency check has found security vulnerabilities. Please use a newer version
of the dependency that does not have vulnerabilities. To see a report run
`mvn dependency-check:check`
If the analysis has false positives,
they can be suppressed by adding entries to owasp-dependency-check-suppressions.xml (for more
information, see https://jeremylong.github.io/DependencyCheck/general/suppression.html).
" && false; }
- <<: *security_vulnerabilities
name: "security vulnerabilities with Hadoop3"
env:
- HADOOP_PROFILE='-Phadoop3'
# Travis CI only supports per build (and not per-job notifications): https://github.com/travis-ci/travis-ci/issues/9888
notifications:
email:
if: type = cron
recipients:
# This is the string "dev@druid.apache.org" encrypted against the apache/druid repo so that forks are unable to
# use this notification:
# https://github.com/travis-ci/travis-ci/issues/1094#issuecomment-215019909
# https://github.com/travis-ci/travis-ci/issues/2711
- secure: "MupjX/0jLwh3XzHPl74BTk2/Kp5r+8TrEewfRhpQdWKFMBXLKNqu0k2VXf5C/NIg3uvPianq3REk+qeTHI8dL2ShjiWS/eIRkJOHLfObdNNBuos5fo4TxAuBQcXyT4VjAq5jnAkH84Pxf2Nl0rkisWoIhvwSX7+kNrjW1qdu7K0="
on_success: change
on_failure: always

check_test_suite.py

@@ -28,7 +28,7 @@ always_run_jobs = ['license checks', 'license checks with Hadoop3', '(openjdk8)
 # of CI can be skipped. however, jobs which are always run will still be run even if only these files are changed
 ignore_prefixes = ['.github', '.idea', '.asf.yaml', '.backportrc.json', '.codecov.yml', '.dockerignore', '.gitignore',
                    '.lgtm.yml', 'CONTRIBUTING.md', 'setup-hooks.sh', 'upload.sh', 'dev', 'distribution/docker',
-                   'distribution/asf-release-process-guide.md', '.travis.yml',
+                   'distribution/asf-release-process-guide.md',
                    'owasp-dependency-check-suppressions.xml', 'licenses']
 script_prefixes = ['check_test_suite.py', 'check_test_suite_test.py']

check_test_suite_test.py

@@ -21,13 +21,11 @@ import check_test_suite
 class CheckTestSuite(unittest.TestCase):
     def test_always_run(self):
         for job in check_test_suite.always_run_jobs:
-            self.assertEqual(True, check_test_suite.check_should_run_suite(job, ['.travis.yml']))
             self.assertEqual(True, check_test_suite.check_should_run_suite(job, ['docs/ingestion/index.md']))
             self.assertEqual(True, check_test_suite.check_should_run_suite(job, ['web-console/src/views/index.ts']))
             self.assertEqual(True, check_test_suite.check_should_run_suite(job, ['core/src/main/java/org/apache/druid/math/expr/Expr.java']))

     def test_docs(self):
-        self.assertEqual(False, check_test_suite.check_docs('.travis.yml'))
         self.assertEqual(False, check_test_suite.check_docs('check_test_suite_test.py'))
         self.assertEqual(True, check_test_suite.check_docs('website/core/Footer.js'))
         self.assertEqual(True, check_test_suite.check_docs('docs/ingestion/index.md'))
@@ -48,7 +46,6 @@ class CheckTestSuite(unittest.TestCase):
     def test_web_console(self):
         web_console_job = 'web console'
         e2e_job = 'web console end-to-end test'
-        self.assertEqual(False, check_test_suite.check_console('.travis.yml'))
         self.assertEqual(False, check_test_suite.check_console('check_test_suite_test.py'))
         self.assertEqual(False, check_test_suite.check_console('website/core/Footer.js'))
         self.assertEqual(True, check_test_suite.check_console('web-console/assets/azure.png'))
@@ -85,7 +82,6 @@ class CheckTestSuite(unittest.TestCase):
         )

     def test_testable_script(self):
-        self.assertEqual(False, check_test_suite.check_testable_script('.travis.yml'))
         self.assertEqual(True, check_test_suite.check_testable_script('check_test_suite.py'))
         self.assertEqual(True, check_test_suite.check_testable_script('check_test_suite_test.py'))
@@ -124,7 +120,6 @@ class CheckTestSuite(unittest.TestCase):
         some_java_job = 'spotbugs checks'
         some_non_java_diffs = [
-            ['.travis.yml'],
             ['check_test_suite_test.py'],
             ['website/core/Footer.js'],
             ['web-console/src/views/index.ts'],


@@ -46,7 +46,6 @@
         <exclude>.gitignore</exclude>
         <exclude>.dockerignore</exclude>
-        <exclude>.travis.yml</exclude>
         <exclude>README.md</exclude>
         <exclude>README.BINARY</exclude>
         <exclude>publications/**</exclude>


@@ -83,7 +83,6 @@ test as a JUnit test.
 * [Quickstart](docs/quickstart.md)
 * [Create a new test](docs/guide.md)
 * [Maven configuration](docs/maven.md)
-* [Travis integration](docs/travis.md)
 * [Docker image](docs/docker.md)
 * [Druid configuration](docs/druid-config.md)
 * [Docker Compose configuration](docs/compose.md)

docs/maven.md

@@ -20,7 +20,7 @@
 # Maven Structure

 The integration tests are built and run as part of Druid's Maven script.
-Maven itself is used by hand, and as part of the [Travis](travis.md) build
+Maven itself is used by hand, and as part of the [GHA](../../.github/workflows/revised-its.yml) build
 process. Running integration tests in maven is a multi-part process.

 * Build the product `distribution`.

docs/travis.md

@@ -1,168 +0,0 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
# Travis Integration
Apache Druid uses Travis to manage builds, including running the integration
tests. You can find the Travis build file at `$DRUID_DEV/.travis.yml`, where
`DRUID_DEV` is the root of your Druid development directory. Information
about Travis can be found at:
* [Documentation](https://docs.travis-ci.com/)
* [Job lifecycle](https://docs.travis-ci.com/user/job-lifecycle/)
* [Environment variables](https://docs.travis-ci.com/user/environment-variables/)
* [Travis file reference](https://config.travis-ci.com/)
* [Travis YAML](https://docs.travis-ci.com/user/build-config-yaml)
## Running ITs In Travis
Travis integration is still experimental. The latest iteration is:
```yaml
- name: "experimental docker tests"
stage: Tests - phase 1
script: ${MVN} install -P test-image,docker-tests -rf :it-tools ${MAVEN_SKIP} -DskipUTs=true
after_failure:
- docker-tests/check-results.sh
```
The above is a Travis job definition. The job "inherits" an `install` task defined
earlier in the file. That install task builds all of Druid and creates the distribution
tarball. Since the tests are isolated in specialized Maven profiles, the `install`
task does not build any of the IT-related artifacts.
We've placed the test run in "Phase 1" for debugging convenience. Later, the tests
will run in "Phase 2" along with the other ITs. Once conversion is complete, the
"previous generation" IT tests will be replaced by the newer revisions.
The `script` runs the ITs. The components of the command line are:
* `install` - Run Maven through the install [lifecycle phase](
https://maven.apache.org/guides/introduction/introduction-to-the-lifecycle.html)
for each module. This allows us to build and install the "testing tools"
(see the [Maven notes](maven.md)). The test image is also built during the
`install` phase. The tests themselves only need the `verify` phase, which occurs
before `install`. `install` does nothing for ITs.
* `-P test-image,docker-tests` - activates the profiles that build the test image
(`test-image`) and then run the ITs (`docker-tests`).
* `-rf :it-tools` - The `it-tools` module is the first of the IT modules: it contains
the "testing tools" added into the image. Using `-rf` skips all the other projects
which we already built in the Travis `install` step. Doing so saves the time
otherwise required for Maven to figure out it has nothing to do for those modules.
* `${MAVEN_SKIP}` - Omits the static checks: they are not needed for ITs.
* `-DskipUTs=true` - The ITs use the [Maven Failsafe plugin](
https://maven.apache.org/surefire/maven-failsafe-plugin/index.html)
which shares code with the [Maven Surefire plugin](
https://maven.apache.org/surefire/maven-surefire-plugin/index.html). We don't want
to run unit tests. If we did the usual `-DskipTests`, then we'd also disable the
ITs. The `-DskipUTs=true` uses a bit of [Maven trickery](
https://stackoverflow.com/questions/6612344/prevent-unit-tests-but-allow-integration-tests-in-maven)
to skip only the Surefire tests, but not the Failsafe tests.
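
Putting those pieces together and substituting `${MVN}` (`mvn -B`) and
`${MAVEN_SKIP}` from the global `env` block at the top of `.travis.yml`, the
equivalent command, runnable locally, would look like:

```bash
# Expanded form of the Travis `script` above; a sketch for local use.
# MVN and MAVEN_SKIP values are taken from .travis.yml's global env block.
mvn -B install -P test-image,docker-tests -rf :it-tools \
    -P skip-static-checks -Dweb.console.skip=true -Dmaven.javadoc.skip=true \
    -DskipUTs=true
```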
## Travis Diagnostics
A common failure when running ITs is that they uncover a bug in a Druid service;
typically in the code you added that you want to test. Or, if you are changing the
Docker or Docker Compose infrastructure, then the tests will often fail because the
Druid services are mis-configured. (Bad configuration tends to result in services
that don't start, or start and immediately exit.)
The standard way to diagnose such failures is to look at the Druid logs. However,
Travis provides no support for attaching files to a build. The best alternative
seems to be to upload the files somewhere else. As a compromise, the Travis build
will append to the build log a subset of the Druid logs.
Travis has a limit of 4MB per build log, so we can't append the entire log for
every Druid service for every IT. We have to be selective. In most cases, we only
care about the logs for ITs that fail.
Now, it turns out to be *very hard* indeed to capture failures! Eventually, we want
Maven to run many ITs for each test run: we need to know which failed. Each IT
creates its own "shared" directory, so to find the logs, we need to know which IT
failed. Travis does not have this information: Travis only knows that Maven itself
exited with a non-zero status. Maven doesn't know: it only knows that Failsafe
failed the build. Failsafe is designed to run all ITs, then check the results in
the `verify` phase, so Maven doesn't even know about the failures.
### Failsafe Error Reports
To work around all this, we mimic Failsafe: we look at the Failsafe error report
in `$DRUID_DEV/docker-tests/<module>/target/failsafe-reports/failsafe-summary.xml`
which looks like this:
```xml
<failsafe-summary ... result="null" timeout="false">
<completed>3</completed>
<errors>1</errors>
<failures>0</failures>
<skipped>0</skipped>
<failureMessage xsi:nil="true" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"/>
</failsafe-summary>
```
The above shows one error and no failures. A successful run will show 0 for the
`errors` tag. This example tells us "something didn't work". The corresponding
Druid service logs are candidates for review.
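
A minimal sketch of that check, against the layout described above (the real
`check-results.sh` logic may differ):

```bash
#!/bin/bash
# Sketch only: flag each module whose Failsafe summary reports errors or
# failures, so that its Druid service logs become candidates for review.
for summary in docker-tests/*/target/failsafe-reports/failsafe-summary.xml; do
  errors=$(sed -n 's:.*<errors>\([0-9]*\)</errors>.*:\1:p' "$summary")
  failures=$(sed -n 's:.*<failures>\([0-9]*\)</failures>.*:\1:p' "$summary")
  if (( ${errors:-0} > 0 || ${failures:-0} > 0 )); then
    echo "Failsafe reported problems in: $summary"
  fi
done
```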
### Druid Service Failures
The Druid logs are in `$DRUID_DEV/docker-tests/<module>/target/shared/logs`.
We could append all of them, but recall the 4MB limit. We generally are
interested only in those services that failed. So, we look at the logs and
see that a successful run is indicated by a normal Lifecycle shutdown:
```text
2022-04-16T20:54:37,997 INFO [Thread-56] org.apache.druid.java.util.common.lifecycle.Lifecycle - Stopping lifecycle [module] stage [INIT]
```
The key bit of text is:
```text
Stopping lifecycle [module] stage [INIT]
```
This says that 1) we're shutting down the lifecycle (which means no exception was thrown),
and 2) that we got all the way to the end (`[INIT]`). Since Druid emits no final
"exited normally" message, we take the above as the next-best thing.
So, we only care about logs that *don't* have the above line. For those, we want to
append the log to the build output; because of the size limit, we append only the
last 100 lines.
All of this is encapsulated in the `docker-tests/check-results.sh` script, which
is run if the build fails (in the `after_failure` tag).
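
In outline, that check amounts to something like the following sketch (paths
per the directory layout above; the actual script may differ):

```bash
#!/bin/bash
# Sketch only: append the tail of every Druid service log that lacks the
# normal-shutdown marker discussed above.
marker='Stopping lifecycle \[module\] stage \[INIT\]'
for log in docker-tests/*/target/shared/logs/*.log; do
  if ! grep -q "$marker" "$log"; then
    echo "$(basename "$log") logtail ========================"
    tail -100 "$log"
  fi
done
```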
### Druid Log Output
For a failed test, the build log will end with something like this:
```text
======= it-high-availability Failed ==========
broker.log logtail ========================
2022-04-16T03:53:10,492 INFO [CoordinatorRuleManager-Exec--0] org.apache.druid.discovery.DruidLeaderClient - Request[http://coordinator-one:8081/druid/coordinator/v1/rules] received redirect response to location [http://coordinator-two:8081/druid/coordinator/v1/rules].
...
```
To keep below the limit, only the first failed test is reported.
The above won't catch all cases: maybe the service exited normally, but might still have
log lines of interest. Since all tests run, those lines could be anywhere in the file
and the scripts can't know which might be of interest. To handle that, we either
have to upload all logs somewhere, or use the convenience of the new
IT framework to rerun the tests on your development machine.
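
Locally, such a rerun can mirror the steps that the `travis` command in `it.sh`
(removed later in this commit) chained together, for example for the
`HighAvailability` category:

```bash
# Build the distribution and test image once, then run and inspect one
# IT category (subcommands as listed in it.sh's usage text).
./it.sh dist
./it.sh image
./it.sh test HighAvailability
./it.sh tail HighAvailability
```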

it.sh

@@ -49,8 +49,6 @@ Usage: $0 cmd [category]
       start the cluster, run the test for category, and stop the cluster
   tail <category>
       show the last 20 lines of each container log
-  travis <category>
-      run one IT in Travis (build dist, image, run test, tail logs)
   github <category>
       run one IT in Github Workflows (run test, tail logs)
   prune
@@ -92,7 +90,7 @@ function tail_logs
 #    pass into tests when running locally.
 # 3. A file given by the OVERRIDE_ENV environment variable. That is, OVERRIDE_ENV holds
 #    the path to a file of var=value pairs. Historically, this file was created by a
-#    build environment such as Travis. However, it is actually simpler just to use
+#    build environment such as Github Actions. However, it is actually simpler just to use
 #    option 1: just set the values in the environment and let Linux pass them through to
 #    this script.
 # 4. Environment variables of the form "druid_" used to create the Druid config file.
@@ -230,13 +228,6 @@ case $CMD in
         prepare_category $1
         tail_logs $CATEGORY
         ;;
-    "travis" )
-        prepare_category $1
-        $0 dist
-        $0 image
-        $0 test $CATEGORY
-        $0 tail $CATEGORY
-        ;;
     "github" )
         prepare_category $1
         $0 test $CATEGORY