mirror of
https://github.com/apache/druid.git
synced 2025-02-09 11:34:54 +00:00
* Fixes and tests related to the Indexer process. Three bugs fixed: 1) Indexers would not announce themselves as segment servers if they did not have storage locations defined. This used to work, but was broken in #9971. Fixed this by adding an "isSegmentServer" method to ServerType and updating SegmentLoadDropHandler to always announce if this method returns true. 2) Certain batch task types were written in a way that assumed "isReady" would be called before "run", which is not guaranteed. In particular, they relied on it in order to initialize "taskLockHelper". Fixed this by updating AbstractBatchIndexTask to ensure "isReady" is called before "run" for these tasks. 3) UnifiedIndexerAppenderatorsManager did not properly handle complex datasources. Introduced DataSourceAnalysis in order to fix this. Test changes: 1) Add a new "docker-compose.cli-indexer.yml" config that spins up an Indexer instead of a MiddleManager. 2) Introduce a "USE_INDEXER" environment variable that determines if docker-compose will start up an Indexer or a MiddleManager. 3) Duplicate all the jdk8 tests and run them in both MiddleManager and Indexer mode. 4) Various adjustments to encourage fail-fast errors in the Docker build scripts. 5) Various adjustments to speed up integration tests and reduce memory usage. 6) Add another Mac-specific approach to determining a machine's own IP. This was useful on my development machine. 7) Update segment-count check in ITCompactionTaskTest to eliminate a race condition (it was looking for 6 segments, which only exist together briefly, until the older 4 are marked unused). Javadoc updates: 1) AbstractBatchIndexTask: Added javadocs to determineLockGranularityXXX that make it clear when taskLockHelper will be initialized as a side effect. (Related to the second bug above.) 2) Task: Clarified that "isReady" is not guaranteed to be called before "run". It was already implied, but now it's explicit. 3) ZkCoordinator: Clarified deprecation message. 
4) DataSegmentServerAnnouncer: Clarified deprecation message. * Fix stop_cluster script. * Fix sanity check in script. * Fix hashbang lines. * Test and doc adjustments. * Additional tests, and adjustments for tests. * Split ITs back out. * Revert change to druid_coordinator_period_indexingPeriod. * Set Indexer capacity to match MM. * Bump up Historical memory. * Bump down coordinator, overlord memory. * Bump up Broker memory.
577 lines
28 KiB
YAML
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

language: java

sudo: true
dist: xenial

jdk:
  - openjdk8

cache:
  directories:
    - $HOME/.m2

env:
  global:
    - DOCKER_IP=127.0.0.1  # for integration tests
    - MVN="mvn -B"
    - > # Various options to make execution of maven goals faster (e.g., mvn install)
      MAVEN_SKIP="-Pskip-static-checks -Ddruid.console.skip=true -Dmaven.javadoc.skip=true"
    - MAVEN_SKIP_TESTS="-Pskip-tests"

addons:
  apt:
    packages:
      - maven

# Add various options to make 'mvn install' fast and skip javascript compile (-Ddruid.console.skip=true) since it is not
# needed. Depending on network speeds, "mvn -q install" may take longer than the default 10 minute timeout to print any
# output. To compensate, use travis_wait to extend the timeout.
install: MAVEN_OPTS='-Xmx3000m' travis_wait 15 ${MVN} clean install -q -ff ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C

stages:
  - name: test  # jobs that do not specify a stage get this default value
    if: type != cron
  - name: cron
    if: type = cron
|
|
|
|
jobs:
  include:
    - name: "animal sniffer checks"
      script: ${MVN} animal-sniffer:check --fail-at-end

    - name: "checkstyle"
      script: ${MVN} checkstyle:checkstyle --fail-at-end

    - name: "enforcer checks"
      script: ${MVN} enforcer:enforce --fail-at-end

    - name: "forbidden api checks"
      script: ${MVN} forbiddenapis:check forbiddenapis:testCheck --fail-at-end

    - name: "pmd checks"
      script: ${MVN} pmd:check --fail-at-end  # TODO: consider adding pmd:cpd-check

    - name: "spotbugs checks"
      script: ${MVN} spotbugs:check --fail-at-end -pl '!benchmarks'

    - name: "license checks"
      install: skip
      before_script: &setup_generate_license
        - sudo apt-get update && sudo apt-get install python3 python3-pip python3-setuptools -y
        - pip3 install wheel  # install wheel first explicitly
        - pip3 install pyyaml
      script:
        - >
          ${MVN} apache-rat:check -Prat --fail-at-end
          -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn
          -Drat.consoleOutput=true
        # Generate dependency reports and checks they are valid. When running on Travis CI, 2 cores are available
        # (https://docs.travis-ci.com/user/reference/overview/#virtualisation-environment-vs-operating-system).
        - mkdir -p target
        - distribution/bin/generate-license-dependency-reports.py . target --clean-maven-artifact-transfer --parallel 2
        - distribution/bin/check-licenses.py licenses.yaml target/license-reports

    - name: "(openjdk8) strict compilation"
      install: skip
      # Strict compilation requires more than 2 GB
      script: >
        MAVEN_OPTS='-Xmx3000m' ${MVN} clean -Pstrict compile test-compile --fail-at-end
        -pl '!benchmarks' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}

    - name: "analyze dependencies"
      script: |-
        MAVEN_OPTS='-Xmx3000m' ${MVN} ${MAVEN_SKIP} dependency:analyze -DoutputXML=true -DignoreNonCompile=true -DfailOnWarning=true || { echo "

        The dependency analysis has found a dependency that is either:

        1) Used and undeclared: These are available as a transitive dependency but should be explicitly
        added to the POM to ensure the dependency version. The XML to add the dependencies to the POM is
        shown above.

        2) Unused and declared: These are not needed and removing them from the POM will speed up the build
        and reduce the artifact size. The dependencies to remove are shown above.

        If there are false positive dependency analysis warnings, they can be suppressed:
        https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html#usedDependencies
        https://maven.apache.org/plugins/maven-dependency-plugin/examples/exclude-dependencies-from-dependency-analysis.html

        For more information, refer to:
        https://maven.apache.org/plugins/maven-dependency-plugin/analyze-mojo.html

        " && false; }

    - name: "intellij inspections"
      script: >
        docker run --rm
        -v $(pwd):/project
        -v ~/.m2:/home/inspect/.m2
        ccaominh/intellij-inspect:1.0.0
        /project/pom.xml
        /project/.idea/inspectionProfiles/Druid.xml
        --levels ERROR
        --scope JavaInspectionsScope

    - &package
      name: "(openjdk8) packaging check"
      install: skip
      before_script: *setup_generate_license
      script: >
        MAVEN_OPTS='-Xmx3000m' ${MVN} clean install -Prat -Pdist -Pbundle-contrib-exts --fail-at-end
        -pl '!benchmarks' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -Ddruid.console.skip=false -T1C

    - <<: *package
      name: "(openjdk11) packaging check"
      jdk: openjdk11

    - <<: *package
      name: "Build and test on ARM64 CPU architecture"
      arch: arm64
      jdk: openjdk11
|
|
|
- &test_processing_module
|
|
name: "(openjdk8) processing module test"
|
|
env:
|
|
- MAVEN_PROJECTS='processing'
|
|
before_script:
|
|
- export DRUID_USE_DEFAULT_VALUE_FOR_NULL=true
|
|
script:
|
|
- unset _JAVA_OPTIONS
|
|
# Set MAVEN_OPTS for Surefire launcher. Skip remoteresources to avoid intermittent connection timeouts when
|
|
# resolving the SIGAR dependency.
|
|
- >
|
|
MAVEN_OPTS='-Xmx800m' ${MVN} test -pl ${MAVEN_PROJECTS}
|
|
${MAVEN_SKIP} -Dremoteresources.skip=true -Ddruid.generic.useDefaultValueForNull=${DRUID_USE_DEFAULT_VALUE_FOR_NULL}
|
|
- sh -c "dmesg | egrep -i '(oom|out of memory|kill process|killed).*' -C 1 || exit 0"
|
|
- free -m
|
|
- ${MVN} -pl ${MAVEN_PROJECTS} jacoco:report
|
|
# Add merge target branch to determine diff (see https://github.com/travis-ci/travis-ci/issues/6069).
|
|
# This is not needed for build triggered by tags, since there will be no code diff.
|
|
- echo "TRAVIS_BRANCH=${TRAVIS_BRANCH}" # for debugging
|
|
- if [[ -z "${TRAVIS_TAG}" ]]; then git remote set-branches --add origin ${TRAVIS_BRANCH} && git fetch; fi
|
|
# Determine the modified files that match the maven projects being tested. We use maven project lists that
|
|
# either exclude (starts with "!") or include (does not start with "!"), so both cases need to be handled.
|
|
# If the build is triggered by a tag, an error will be printed, but `all_files` will be correctly set to empty
|
|
# so that the coverage check is skipped.
|
|
- all_files="$(git diff --name-only origin/${TRAVIS_BRANCH}...HEAD | grep "\.java$" || [[ $? == 1 ]])"
|
|
- for f in ${all_files}; do echo $f; done # for debugging
|
|
- >
|
|
if [[ "${MAVEN_PROJECTS}" = \!* ]]; then
|
|
regex="${MAVEN_PROJECTS:1}";
|
|
regex="^${regex//,\!/\\|^}";
|
|
project_files="$(echo "${all_files}" | grep -v "${regex}" || [[ $? == 1 ]])";
|
|
else
|
|
regex="^${MAVEN_PROJECTS//,/\\|^}";
|
|
project_files="$(echo "${all_files}" | grep "${regex}" || [[ $? == 1 ]])";
|
|
fi
|
|
- for f in ${project_files}; do echo $f; done # for debugging
|
|
# Check diff code coverage for the maven projects being tested (retry install in case of network error).
|
|
# Currently, the function coverage check is not reliable, so it is disabled.
|
|
- >
|
|
if [ -n "${project_files}" ]; then
|
|
travis_retry npm install @connectis/diff-test-coverage@1.5.3
|
|
&& git diff origin/${TRAVIS_BRANCH}...HEAD -- ${project_files}
|
|
| node_modules/.bin/diff-test-coverage
|
|
--coverage "**/target/site/jacoco/jacoco.xml"
|
|
--type jacoco
|
|
--line-coverage 50
|
|
--branch-coverage 50
|
|
--function-coverage 0
|
|
--log-template "coverage-lines-complete"
|
|
--log-template "coverage-files-complete"
|
|
--log-template "totals-complete"
|
|
--log-template "errors"
|
|
--
|
|
|| { printf "\n\n****FAILED****\nDiff code coverage check failed. To view coverage report, run 'mvn clean test jacoco:report' and open 'target/site/jacoco/index.html'\nFor more details on how to run code coverage locally, follow instructions here - https://github.com/apache/druid/blob/master/dev/code-review/code-coverage.md#running-code-coverage-locally\n\n" && false; }
|
|
fi
|
|
after_success:
|
|
# retry in case of network error
|
|
- travis_retry curl -o codecov.sh -s https://codecov.io/bash
|
|
- travis_retry bash codecov.sh -X gcov
|
|
|
|
- <<: *test_processing_module
|
|
name: "(openjdk11) processing module test"
|
|
jdk: openjdk11
|
|
|
|
- &test_processing_module_sqlcompat
|
|
<<: *test_processing_module
|
|
name: "(openjdk8) processing module test (SQL Compatibility)"
|
|
before_script: &setup_sqlcompat
|
|
- export DRUID_USE_DEFAULT_VALUE_FOR_NULL=false
|
|
|
|
- <<: *test_processing_module_sqlcompat
|
|
name: "(openjdk11) processing module test (SQL Compatibility)"
|
|
jdk: openjdk11
|
|
|
|
- &test_indexing_module
|
|
<<: *test_processing_module
|
|
name: "(openjdk8) indexing modules test"
|
|
env:
|
|
- MAVEN_PROJECTS='indexing-hadoop,indexing-service,extensions-core/kafka-indexing-service,extensions-core/kinesis-indexing-service'
|
|
|
|
- <<: *test_indexing_module
|
|
name: "(openjdk11) indexing modules test"
|
|
jdk: openjdk11
|
|
|
|
- &test_indexing_module_sqlcompat
|
|
<<: *test_indexing_module
|
|
name: "(openjdk8) indexing modules test (SQL Compatibility)"
|
|
before_script: *setup_sqlcompat
|
|
|
|
- <<: *test_indexing_module_sqlcompat
|
|
name: "(openjdk11) indexing modules test (SQL Compatibility)"
|
|
jdk: openjdk11
|
|
|
|
- &test_server_module
|
|
<<: *test_processing_module
|
|
name: "(openjdk8) server module test"
|
|
env:
|
|
- MAVEN_PROJECTS='server'
|
|
|
|
- <<: *test_server_module
|
|
name: "(openjdk11) server module test"
|
|
jdk: openjdk11
|
|
|
|
- &test_server_module_sqlcompat
|
|
<<: *test_server_module
|
|
name: "(openjdk8) server module test (SQL Compatibility)"
|
|
before_script: *setup_sqlcompat
|
|
|
|
- <<: *test_server_module_sqlcompat
|
|
name: "(openjdk11) server module test (SQL Compatibility)"
|
|
jdk: openjdk11
|
|
|
|
- &test_other_modules
|
|
<<: *test_processing_module
|
|
name: "(openjdk8) other modules test"
|
|
env:
|
|
- MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests'
|
|
|
|
- <<: *test_other_modules
|
|
name: "(openjdk11) other modules test"
|
|
jdk: openjdk11
|
|
|
|
- &test_other_modules_sqlcompat
|
|
<<: *test_other_modules
|
|
name: "(openjdk8) other modules test (SQL Compatibility)"
|
|
before_script: *setup_sqlcompat
|
|
|
|
- <<: *test_other_modules_sqlcompat
|
|
name: "(openjdk11) other modules test (SQL Compatibility)"
|
|
jdk: openjdk11
|
|
|
|
- name: "web console"
|
|
install: skip
|
|
script:
|
|
- ${MVN} test -pl 'web-console'
|
|
after_success:
|
|
- (cd web-console && travis_retry npm run codecov) # retry in case of network error
|
|
|
|
- name: "web console end-to-end test"
|
|
before_install: *setup_generate_license
|
|
install: web-console/script/druid build
|
|
before_script: web-console/script/druid start
|
|
script: (cd web-console && npm run test-e2e)
|
|
after_script: web-console/script/druid stop
|
|
|
|
- name: "docs"
|
|
install: (cd website && npm install)
|
|
script: |-
|
|
(cd website && npm run lint && npm run spellcheck) || { echo "
|
|
|
|
If there are spell check errors:
|
|
|
|
1) Suppressing False Positives: Edit website/.spelling to add suppressions. Instructions
|
|
are at the top of the file and explain how to suppress false positives either globally or
|
|
within a particular file.
|
|
|
|
2) Running Spell Check Locally: cd website && npm install && npm run spellcheck
|
|
|
|
For more information, refer to: https://www.npmjs.com/package/markdown-spellcheck
|
|
|
|
" && false; }
|
|
|
|
# Integration tests Java Compile version is set by the machine environment jdk (set by the jdk key)
|
|
# Integration tests Java Runtime version is set by the JVM_RUNTIME env property (set env key to -Djvm.runtime=<JVM_RUNTIME_VERSION>)
|
|
# Integration tests will either use MiddleManagers or Indexers
|
|
# (Currently integration tests only support running with jvm runtime 8 and 11)
|
|
# START - Integration tests for Compile with Java 8 and Run with Java 8
|
|
- &integration_batch_index
|
|
name: "(Compile=openjdk8, Run=openjdk8) batch index integration test"
|
|
jdk: openjdk8
|
|
services: &integration_test_services
|
|
- docker
|
|
env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: &run_integration_test
|
|
- ${MVN} verify -pl integration-tests -P integration-tests ${TESTNG_GROUPS} ${JVM_RUNTIME} -Dit.indexer=${USE_INDEXER} ${MAVEN_SKIP}
|
|
after_failure: &integration_test_diags
|
|
- for v in ~/shared/logs/*.log ; do
|
|
echo $v logtail ======================== ; tail -100 $v ;
|
|
done
|
|
- for v in broker middlemanager overlord router coordinator historical ; do
|
|
echo $v dmesg ======================== ;
|
|
docker exec -it druid-$v sh -c 'dmesg | tail -3' ;
|
|
done
|
|
|
|
- <<: *integration_batch_index
|
|
name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer"
|
|
env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
|
|
- &integration_input_format
|
|
name: "(Compile=openjdk8, Run=openjdk8) input format integration test"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- <<: *integration_input_format
|
|
name: "(Compile=openjdk8, Run=openjdk8) input format integration test with Indexer"
|
|
env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
|
|
- &integration_input_source
|
|
name: "(Compile=openjdk8, Run=openjdk8) input source integration test"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- <<: *integration_input_source
|
|
name: "(Compile=openjdk8, Run=openjdk8) input source integration test with Indexer"
|
|
env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
|
|
- &integration_perfect_rollup_parallel_batch_index
|
|
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- <<: *integration_perfect_rollup_parallel_batch_index
|
|
name: "(Compile=openjdk8, Run=openjdk8) perfect rollup parallel batch index integration test with Indexer"
|
|
env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
|
|
- &integration_kafka_index
|
|
name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=kafka-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- <<: *integration_kafka_index
|
|
name: "(Compile=openjdk8, Run=openjdk8) kafka index, transactional kafka index integration test with Indexer"
|
|
env: TESTNG_GROUPS='-Dgroups=kafka-index,kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
|
|
- &integration_kafka_index_slow
|
|
name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- <<: *integration_kafka_index_slow
|
|
name: "(Compile=openjdk8, Run=openjdk8) kafka index integration test slow with Indexer"
|
|
env: TESTNG_GROUPS='-Dgroups=kafka-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
|
|
- &integration_kafka_transactional_index
|
|
name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- &integration_kafka_transactional_index_slow
|
|
name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- <<: *integration_kafka_transactional_index_slow
|
|
name: "(Compile=openjdk8, Run=openjdk8) transactional kafka index integration test slow with Indexer"
|
|
env: TESTNG_GROUPS='-Dgroups=kafka-transactional-index-slow' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
|
|
- &integration_kafka_format_tests
|
|
name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- <<: *integration_kafka_format_tests
|
|
name: "(Compile=openjdk8, Run=openjdk8) Kafka index integration test with various formats with Indexer"
|
|
env: TESTNG_GROUPS='-Dgroups=kafka-data-format' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
|
|
- &integration_query
|
|
name: "(Compile=openjdk8, Run=openjdk8) query integration test"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- &integration_query_retry
|
|
name: "(Compile=openjdk8, Run=openjdk8) query retry integration test for missing segments"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- &integration_security
|
|
name: "(Compile=openjdk8, Run=openjdk8) security integration test"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- &integration_realtime_index
|
|
name: "(Compile=openjdk8, Run=openjdk8) realtime index integration test"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- &integration_append_ingestion
|
|
name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- <<: *integration_append_ingestion
|
|
name: "(Compile=openjdk8, Run=openjdk8) append ingestion integration test with Indexer"
|
|
env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
|
|
- &integration_compaction_tests
|
|
name: "(Compile=openjdk8, Run=openjdk8) compaction integration test"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- <<: *integration_compaction_tests
|
|
name: "(Compile=openjdk8, Run=openjdk8) compaction integration test with Indexer"
|
|
env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
|
|
- &integration_tests
|
|
name: "(Compile=openjdk8, Run=openjdk8) other integration tests"
|
|
jdk: openjdk8
|
|
services: *integration_test_services
|
|
env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,realtime-index,security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
|
|
script: *run_integration_test
|
|
after_failure: *integration_test_diags
|
|
|
|
- <<: *integration_tests
|
|
name: "(Compile=openjdk8, Run=openjdk8) other integration tests with Indexer"
|
|
env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,realtime-index,security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
|
|
# END - Integration tests for Compile with Java 8 and Run with Java 8
|
|
|
|
# START - Integration tests for Compile with Java 8 and Run with Java 11
|
|
- <<: *integration_batch_index
|
|
name: "(Compile=openjdk8, Run=openjdk11) batch index integration test"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
|
|
- <<: *integration_input_format
|
|
name: "(Compile=openjdk8, Run=openjdk11) input format integration test"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-Dgroups=input-format' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
|
|
- <<: *integration_input_source
|
|
name: "(Compile=openjdk8, Run=openjdk11) input source integration test"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-Dgroups=input-source' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
|
|
- <<: *integration_perfect_rollup_parallel_batch_index
|
|
name: "(Compile=openjdk8, Run=openjdk11) perfect rollup parallel batch index integration test"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-Dgroups=perfect-rollup-parallel-batch-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
|
|
- <<: *integration_query
|
|
name: "(Compile=openjdk8, Run=openjdk11) query integration test"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
|
|
- <<: *integration_query_retry
|
|
name: "(Compile=openjdk8, Run=openjdk11) query retry integration test for missing segments"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-Dgroups=query-retry' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
|
|
- <<: *integration_security
|
|
name: "(Compile=openjdk8, Run=openjdk11) security integration test"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
|
|
- <<: *integration_realtime_index
|
|
name: "(Compile=openjdk8, Run=openjdk11) realtime index integration test"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-Dgroups=realtime-index' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
|
|
- <<: *integration_append_ingestion
|
|
name: "(Compile=openjdk8, Run=openjdk11) append ingestion integration test"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-Dgroups=append-ingestion' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
|
|
- <<: *integration_compaction_tests
|
|
name: "(Compile=openjdk8, Run=openjdk11) compaction integration test"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-Dgroups=compaction' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
|
|
- <<: *integration_tests
|
|
name: "(Compile=openjdk8, Run=openjdk11) other integration test"
|
|
jdk: openjdk8
|
|
env: TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,realtime-index,security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-hdfs-deep-storage,hadoop-gcs-to-gcs-deep-storage,hadoop-gcs-to-hdfs-deep-storage,aliyun-oss-deep-storage,append-ingestion,compaction' JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
|
|
# END - Integration tests for Compile with Java 8 and Run with Java 11
|
|
|
|
- name: "security vulnerabilities"
|
|
stage: cron
|
|
install: skip
|
|
script: |-
|
|
${MVN} dependency-check:check || { echo "
|
|
|
|
The OWASP dependency check has found security vulnerabilities. Please use a newer version
|
|
of the dependency that does not have vulnerabilities. If the analysis has false positives,
|
|
they can be suppressed by adding entries to owasp-dependency-check-suppressions.xml (for more
|
|
information, see https://jeremylong.github.io/DependencyCheck/general/suppression.html).
|
|
|
|
" && false; }
|
|
|
|
# Travis CI only supports per build (and not per-job notifications): https://github.com/travis-ci/travis-ci/issues/9888
notifications:
  email:
    if: type = cron
    recipients:
      # This is the string "dev@druid.apache.org" encrypted against the apache/druid repo so that forks are unable to
      # use this notification:
      # https://github.com/travis-ci/travis-ci/issues/1094#issuecomment-215019909
      # https://github.com/travis-ci/travis-ci/issues/2711
      - secure: "MupjX/0jLwh3XzHPl74BTk2/Kp5r+8TrEewfRhpQdWKFMBXLKNqu0k2VXf5C/NIg3uvPianq3REk+qeTHI8dL2ShjiWS/eIRkJOHLfObdNNBuos5fo4TxAuBQcXyT4VjAq5jnAkH84Pxf2Nl0rkisWoIhvwSX7+kNrjW1qdu7K0="
    on_success: change
    on_failure: change