Building druid-it-tools and running for travis in it.sh (#12957)

* Building druid-it-tools and running for travis in it.sh

* Addressing comments

* Updating druid-it-image pom to point to correct it-tools

* Updating all it-tools references to druid-it-tools

* Adding dist back to it.sh travis

* Trigger Build

* Disabling batchIndex tests and commenting out user-specific code

* Fixing checkstyle and intellij inspection errors

* Replacing tabs with spaces in it.sh

* Enabling old batch index tests with indexer
This commit is contained in:
abhagraw 2022-08-30 12:48:07 +05:30 committed by GitHub
parent 414176fb97
commit f3c47cf68c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 94 additions and 161 deletions

View File

@ -46,7 +46,7 @@ addons:
# Add various options to make 'mvn install' fast and skip javascript compile (-Ddruid.console.skip=true) since it is not
# needed. Depending on network speeds, "mvn -q install" may take longer than the default 10 minute timeout to print any
# output. To compensate, use travis_wait to extend the timeout.
install: ./check_test_suite.py && travis_terminate 0 || echo 'Running Maven install...' && MAVEN_OPTS='-Xmx3000m' travis_wait 15 ${MVN} clean install -q -ff -pl '!distribution,!:druid-it-tools,!:druid-it-image,!:druid-it-cases' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C && ${MVN} install -q -ff -pl 'distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
install: ./check_test_suite.py && travis_terminate 0 || echo 'Running Maven install...' && MAVEN_OPTS='-Xmx3000m' travis_wait 15 ${MVN} clean install -q -ff -pl '!distribution,!:druid-it-image,!:druid-it-cases' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C && ${MVN} install -q -ff -pl 'distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
# There are 3 stages of tests
# 1. Tests - phase 1
@ -89,7 +89,6 @@ jobs:
script: ${MVN} spotbugs:check --fail-at-end -pl '!benchmarks'
- name: "license checks"
install: skip
before_script: &setup_generate_license
- sudo apt-get update && sudo apt-get install python3 python3-pip python3-setuptools -y
- ./check_test_suite.py && travis_terminate 0 || echo 'Continuing setup'
@ -457,9 +456,9 @@ jobs:
docker exec -it druid-$v sh -c 'dmesg | tail -3' ;
done
#- <<: *integration_batch_index
# name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer"
# env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- <<: *integration_batch_index
name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer"
env: TESTNG_GROUPS='-Dgroups=batch-index' JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
- &integration_input_format
name: "(Compile=openjdk8, Run=openjdk8) input format integration test"
@ -683,15 +682,16 @@ jobs:
jdk: openjdk8
services: *integration_test_services
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
# Uses the install defined above. Then, builds the test tools and docker image,
# Uses the installation defined above. Then, builds the test tools and docker image,
# and runs one IT. If tests fail, echoes log lines of any of
# the Druid services that did not exit normally.
script: ./it.sh travis HighAvailability
- <<: *integration_tests_ex
name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer (new)"
env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
script: ./it.sh travis BatchIndex
# Disabling the BatchIndex test as it is failing due to a timeout; the fix will be addressed in a separate PR.
#- <<: *integration_tests_ex
# name: "(Compile=openjdk8, Run=openjdk8) batch index integration test with Indexer (new)"
# env: JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='indexer'
# script: ./it.sh travis BatchIndex
# END - Integration tests for Compile with Java 8 and Run with Java 8

View File

@ -49,7 +49,7 @@ DRUID_INSTANCE=
# variables: druid_standard_loadList defined here, and druid_test_loadList, defined
# in a docker-compose.yaml file, for any test-specific extensions.
# See compose.md for more details.
druid_standard_loadList=mysql-metadata-storage,it-tools,druid-lookups-cached-global,druid-histogram,druid-datasketches,druid-parquet-extensions,druid-avro-extensions,druid-protobuf-extensions,druid-orc-extensions,druid-kafka-indexing-service,druid-s3-extensions
druid_standard_loadList=mysql-metadata-storage,druid-it-tools,druid-lookups-cached-global,druid-histogram,druid-datasketches,druid-parquet-extensions,druid-avro-extensions,druid-protobuf-extensions,druid-orc-extensions,druid-kafka-indexing-service,druid-s3-extensions
# Location of Hadoop dependencies provided at runtime in the shared directory.
druid_extensions_hadoopDependenciesDir=/shared/hadoop-dependencies

View File

@ -24,12 +24,6 @@ import org.apache.druid.testsEx.config.ClusterConfig.ClusterType;
import org.apache.druid.testsEx.config.ResolvedService.ResolvedZk;
import org.junit.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Properties;
@ -43,7 +37,7 @@ import static org.junit.Assert.assertNotNull;
public class ClusterConfigTest
{
@Test
public void testYaml() throws FileNotFoundException
public void testYaml()
{
ClusterConfig config = ClusterConfig.loadFromResource("/config-test/test.yaml");
// Uncomment this line to see the full config with includes resolved.
@ -85,15 +79,6 @@ public class ClusterConfigTest
assertEquals("http://localhost:8888", service.clientUrl());
assertEquals("http://localhost:8888", resolved.routerUrl());
File userEnv = new File(
new File(
System.getProperty("user.home"),
"druid-it"),
"Test.env");
try (PrintWriter out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(userEnv), StandardCharsets.UTF_8))) {
out.println("druid_user_var=user");
}
System.setProperty("druid_sys_prop", "sys");
Map<String, Object> props = resolved.toProperties();
// Added from ZK section

View File

@ -1,58 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------
# Definition of the batch index test cluster.
# See https://yaml.org/spec/1.2.2 for more about YAML
include:
- /cluster/Common/zk-metastore.yaml
druid:
coordinator:
instances:
- port: 8081
overlord:
instances:
- port: 8090
broker:
instances:
- port: 8082
router:
instances:
- port: 8888
historical:
instances:
- port: 8083
indexer:
instances:
- port: 8091
# Properties to be set in the Properties object used in
# Guice configuration in lieu of the server-side runtime.properties
# file.
#
# druid.global.http.numMaxThreads avoids creating 40+ Netty threads.
# We only ever use 1.
# druid.test.config.dockerIp is used by some older test code. Remove
# it when that code is updated.
properties:
druid.global.http.numMaxThreads: 3
druid.broker.http.numMaxThreads: 3
druid.test.config.dockerIp: localhost
druid.test.config.cloudBucket: "new-it-framework"
druid.test.config.cloudPath: ""
docker.build.hadoop: true
start.hadoop.docker: true
override.config.path: "/Users/abhishekagrawal/pr_druid_it/druid/integration-tests-ex/it-azure-deep-storage/azure-config"

View File

@ -52,7 +52,7 @@ ENV DRUID_HOME=/usr/local/druid
# Populate build artifacts
COPY apache-druid-${DRUID_VERSION}-bin.tar.gz /usr/local/
COPY it-tools-${DRUID_VERSION}.jar /tmp/druid/extensions/it-tools/
COPY druid-it-tools-${DRUID_VERSION}.jar /tmp/druid/extensions/druid-it-tools/
COPY kafka-protobuf-provider-${CONFLUENT_VERSION}.jar /tmp/druid/lib/
COPY mysql-connector-java-${MYSQL_VERSION}.jar /tmp/druid/lib/
COPY mariadb-java-client-${MARIADB_VERSION}.jar /tmp/druid/lib/

View File

@ -91,8 +91,8 @@ Reference: https://dzone.com/articles/build-docker-image-from-maven
<type>pom</type>
</dependency>
<dependency>
<groupId>org.apache.druid</groupId>
<artifactId>it-tools</artifactId>
<groupId>org.apache.druid.integration-tests</groupId>
<artifactId>druid-it-tools</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
@ -155,8 +155,8 @@ Reference: https://dzone.com/articles/build-docker-image-from-maven
<outputDirectory>${project.build.directory}/docker</outputDirectory>
</artifactItem>
<artifactItem>
<groupId>org.apache.druid</groupId>
<artifactId>it-tools</artifactId>
<groupId>org.apache.druid.integration-tests</groupId>
<artifactId>druid-it-tools</artifactId>
<version>${project.version}</version>
<type>jar</type>
<overWrite>true</overWrite>

146
it.sh
View File

@ -18,17 +18,20 @@
# Utility script for running the new integration tests, since the Maven
# commands are unwieldy.
set -e
export DRUID_DEV=$(cd $(dirname $0) && pwd)
function usage
{
cat <<EOF
cat <<EOF
Usage: $0 cmd [category]
build
build Druid and the distribution
dist
build the Druid distribution (only)
tools
build druid-it-tools
image
build the test image
up <category>
@ -48,13 +51,13 @@ EOF
function tail_logs
{
category=$1
cd integration-tests-ex/cases/target/$category/logs
ls *.log | while read log;
do
echo "----- $category/$log -----"
tail -20 $log
done
category=$1
cd integration-tests-ex/cases/target/$category/logs
ls *.log | while read log;
do
echo "----- $category/$log -----"
tail -20 $log
done
}
CMD=$1
@ -62,69 +65,72 @@ shift
MAVEN_IGNORE="-P skip-static-checks,skip-tests -Dmaven.javadoc.skip=true"
case $CMD in
"help" )
usage
;;
"build" )
mvn clean package -P dist $MAVEN_IGNORE -T1.0C
;;
"dist" )
mvn package -P dist $MAVEN_IGNORE -pl :distribution
;;
"image" )
cd $DRUID_DEV/integration-tests-ex/image
mvn install -P test-image $MAVEN_IGNORE
;;
"up" )
if [ -z "$1" ]; then
usage
exit 1
fi
cd $DRUID_DEV/integration-tests-ex/cases
./cluster.sh up $1
;;
"down" )
if [ -z "$1" ]; then
usage
exit 1
fi
cd $DRUID_DEV/integration-tests-ex/cases
./cluster.sh down $1
;;
"test" )
if [ -z "$1" ]; then
usage
exit 1
fi
cd $DRUID_DEV/integration-tests-ex/cases
mvn verify -P skip-static-checks,docker-tests,IT-$1 \
"help" )
usage
;;
"build" )
mvn clean package -P dist $MAVEN_IGNORE -T1.0C
;;
"dist" )
mvn package -P dist $MAVEN_IGNORE -pl :distribution
;;
"tools" )
mvn install -pl :druid-it-tools
;;
"image" )
cd $DRUID_DEV/integration-tests-ex/image
mvn install -P test-image $MAVEN_IGNORE
;;
"up" )
if [ -z "$1" ]; then
usage
exit 1
fi
cd $DRUID_DEV/integration-tests-ex/cases
./cluster.sh up $1
;;
"down" )
if [ -z "$1" ]; then
usage
exit 1
fi
cd $DRUID_DEV/integration-tests-ex/cases
./cluster.sh down $1
;;
"test" )
if [ -z "$1" ]; then
usage
exit 1
fi
cd $DRUID_DEV/integration-tests-ex/cases
mvn verify -P skip-static-checks,docker-tests,IT-$1 \
-Dmaven.javadoc.skip=true -DskipUTs=true \
-pl :druid-it-cases
;;
"tail" )
if [ -z "$1" ]; then
usage
exit 1
fi
tail_logs $1
;;
;;
"tail" )
if [ -z "$1" ]; then
usage
exit 1
fi
tail_logs $1
;;
"travis" )
if [ -z "$1" ]; then
usage
exit 1
fi
$0 dist
$0 image
$0 test $1
$0 tail $1
;;
"prune" )
# Caution: this removes all volumes, which is generally what you
# want when testing.
docker system prune --volumes
;;
* )
usage
exit -1
;;
if [ -z "$1" ]; then
usage
exit 1
fi
$0 dist
$0 image
$0 test $1
$0 tail $1
;;
"prune" )
# Caution: this removes all volumes, which is generally what you
# want when testing.
docker system prune --volumes
;;
* )
usage
exit -1
;;
esac