diff --git a/.gitignore b/.gitignore index 0828365f2a7..adaf4d14c2b 100644 --- a/.gitignore +++ b/.gitignore @@ -27,7 +27,7 @@ README .pmdruleset.xml .java-version integration-tests/gen-scripts/ -/bin/ +**/bin/ *.hprof **/.ipynb_checkpoints/ website/.yarn/ diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/InPlanningBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/InPlanningBenchmark.java index ce01324116f..046da257423 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/InPlanningBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/InPlanningBenchmark.java @@ -55,6 +55,7 @@ import org.apache.druid.sql.calcite.planner.PlannerResult; import org.apache.druid.sql.calcite.run.SqlEngine; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.LinearShardSpec; import org.openjdk.jmh.annotations.Benchmark; @@ -202,7 +203,8 @@ public class InPlanningBenchmark new CalciteRulesManager(ImmutableSet.of()), CalciteTests.createJoinableFactoryWrapper(), CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); String prefix = ("explain plan for select long1 from foo where long1 in "); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlBenchmark.java index 3441e31c049..06ea2aa9e19 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlBenchmark.java @@ -89,6 +89,7 @@ import org.apache.druid.sql.calcite.util.CalciteTests; import org.apache.druid.sql.calcite.util.LookylooModule; import org.apache.druid.sql.calcite.util.QueryFrameworkUtils; import org.apache.druid.sql.calcite.util.testoperator.CalciteTestOperatorModule; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.timeline.DataSegment; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -674,7 +675,8 @@ public class SqlBenchmark new CalciteRulesManager(ImmutableSet.of()), new JoinableFactoryWrapper(QueryFrameworkUtils.createDefaultJoinableFactory(injector)), CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); return Pair.of(plannerFactory, engine); diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlExpressionBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlExpressionBenchmark.java index 8b2172182a0..d351b40f2db 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlExpressionBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlExpressionBenchmark.java @@ -59,6 +59,7 @@ import org.apache.druid.sql.calcite.planner.PlannerResult; import org.apache.druid.sql.calcite.run.SqlEngine; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.LinearShardSpec; import org.openjdk.jmh.annotations.Benchmark; @@ -364,7 +365,8 @@ public class SqlExpressionBenchmark new CalciteRulesManager(ImmutableSet.of()), 
CalciteTests.createJoinableFactoryWrapper(), CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); try { diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlGroupByBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlGroupByBenchmark.java index bf521b83163..1f58f97b8e8 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlGroupByBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlGroupByBenchmark.java @@ -57,6 +57,7 @@ import org.apache.druid.sql.calcite.planner.PlannerResult; import org.apache.druid.sql.calcite.run.SqlEngine; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.LinearShardSpec; import org.openjdk.jmh.annotations.Benchmark; @@ -347,7 +348,8 @@ public class SqlGroupByBenchmark new CalciteRulesManager(ImmutableSet.of()), CalciteTests.createJoinableFactoryWrapper(), CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); try { diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlNestedDataBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlNestedDataBenchmark.java index c329e9da30e..b896f8df52f 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlNestedDataBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlNestedDataBenchmark.java @@ -63,6 +63,7 @@ import org.apache.druid.sql.calcite.planner.PlannerResult; import org.apache.druid.sql.calcite.run.SqlEngine; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.LinearShardSpec; import org.openjdk.jmh.annotations.Benchmark; @@ -402,7 +403,8 @@ public class SqlNestedDataBenchmark new CalciteRulesManager(ImmutableSet.of()), CalciteTests.createJoinableFactoryWrapper(), CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); try { diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java index bffc03469e8..2150b2100fb 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/query/SqlVsNativeBenchmark.java @@ -50,6 +50,7 @@ import org.apache.druid.sql.calcite.planner.PlannerResult; import org.apache.druid.sql.calcite.run.SqlEngine; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.LinearShardSpec; import org.openjdk.jmh.annotations.Benchmark; @@ -132,7 +133,8 @@ public class SqlVsNativeBenchmark new CalciteRulesManager(ImmutableSet.of()), CalciteTests.createJoinableFactoryWrapper(), CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); groupByQuery = GroupByQuery .builder() diff --git a/dev/quidem b/dev/quidem index 
a68e08b3cfc..1a7d3f0ec96 100755 --- a/dev/quidem +++ b/dev/quidem @@ -28,13 +28,13 @@ OPTS+=" -Dsurefire.rerunFailingTestsCount=0" OPTS+=" -Dorg.slf4j.simpleLogger.log.org.apache.maven.plugin.surefire.SurefirePlugin=INFO" [[ $@ =~ "-q" ]] && OPTS+=" -Dsurefire.trimStackTrace=true" -OPTS+=" -pl sql -Dtest=SqlQuidemTest" +OPTS+=" -pl quidem-ut -Dtest=QTest" OPTS+=" org.apache.maven.plugins:maven-surefire-plugin:test" case "$1" in -h|--help) cat << EOF -Run SqlQuidemTest tests. +Run QTest tests. -q quiet (recommened) -Dquidem.overwrite enables overwrite mode -Dquidem.filter=*join* runs only tests matching path expression diff --git a/extensions-contrib/compressed-bigdecimal/pom.xml b/extensions-contrib/compressed-bigdecimal/pom.xml index 33b368b255e..d6c27d88812 100644 --- a/extensions-contrib/compressed-bigdecimal/pom.xml +++ b/extensions-contrib/compressed-bigdecimal/pom.xml @@ -166,6 +166,11 @@ com.fasterxml.jackson.core jackson-annotations + + org.reflections + reflections + test + diff --git a/extensions-contrib/tdigestsketch/pom.xml b/extensions-contrib/tdigestsketch/pom.xml index 489b0b99556..4c192d62746 100644 --- a/extensions-contrib/tdigestsketch/pom.xml +++ b/extensions-contrib/tdigestsketch/pom.xml @@ -164,6 +164,11 @@ junit-vintage-engine test + + org.reflections + reflections + test + org.easymock easymock diff --git a/extensions-core/datasketches/pom.xml b/extensions-core/datasketches/pom.xml index ee423909d19..75ff5a0868e 100644 --- a/extensions-core/datasketches/pom.xml +++ b/extensions-core/datasketches/pom.xml @@ -170,6 +170,11 @@ junit-jupiter-params test + + org.reflections + reflections + test + org.junit.vintage junit-vintage-engine diff --git a/extensions-core/druid-bloom-filter/pom.xml b/extensions-core/druid-bloom-filter/pom.xml index 8ce419095de..b57ede7b16b 100644 --- a/extensions-core/druid-bloom-filter/pom.xml +++ b/extensions-core/druid-bloom-filter/pom.xml @@ -139,6 +139,11 @@ junit-vintage-engine test + + org.reflections + reflections + test + org.apache.druid druid-processing diff --git a/extensions-core/druid-catalog/pom.xml b/extensions-core/druid-catalog/pom.xml index a48a7f1e7b8..c5bafbeb8dc 100644 --- a/extensions-core/druid-catalog/pom.xml +++ b/extensions-core/druid-catalog/pom.xml @@ -177,6 +177,11 @@ junit-vintage-engine test + + org.reflections + reflections + test + org.easymock easymock diff --git a/extensions-core/histogram/pom.xml b/extensions-core/histogram/pom.xml index c252cfb87d3..a1c1f91e379 100644 --- a/extensions-core/histogram/pom.xml +++ b/extensions-core/histogram/pom.xml @@ -123,6 +123,11 @@ junit-vintage-engine test + + org.reflections + reflections + test + org.apache.druid druid-processing diff --git a/extensions-core/multi-stage-query/pom.xml b/extensions-core/multi-stage-query/pom.xml index 9e637acff00..ab23a26e36f 100644 --- a/extensions-core/multi-stage-query/pom.xml +++ b/extensions-core/multi-stage-query/pom.xml @@ -233,6 +233,11 @@ junit-jupiter-params test + + org.reflections + reflections + test + org.easymock easymock diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/ResultsContextSerdeTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/ResultsContextSerdeTest.java index b6023392576..72f2c817622 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/ResultsContextSerdeTest.java +++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/ResultsContextSerdeTest.java @@ -43,6 +43,7 @@ import 
org.apache.druid.sql.calcite.schema.NamedDruidSchema; import org.apache.druid.sql.calcite.schema.NamedViewSchema; import org.apache.druid.sql.calcite.schema.ViewSchema; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.easymock.EasyMock; import org.junit.Assert; import org.junit.Before; @@ -75,7 +76,8 @@ public class ResultsContextSerdeTest "druid", new CalciteRulesManager(ImmutableSet.of()), CalciteTests.TEST_AUTHORIZER_MAPPER, - AuthConfig.newBuilder().build() + AuthConfig.newBuilder().build(), + new DruidHookDispatcher() ); final NativeSqlEngine engine = CalciteTests.createMockSqlEngine( EasyMock.createMock(QuerySegmentWalker.class), diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java index 080ba843204..5076e9f3d44 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java +++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/test/MSQTestBase.java @@ -186,6 +186,7 @@ import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSuppl import org.apache.druid.sql.calcite.util.TestDataBuilder; import org.apache.druid.sql.calcite.view.InProcessViewManager; import org.apache.druid.sql.guice.SqlBindings; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.storage.StorageConfig; import org.apache.druid.storage.StorageConnector; import org.apache.druid.storage.StorageConnectorModule; @@ -562,7 +563,8 @@ public class MSQTestBase extends BaseCalciteQueryTest new CalciteRulesManager(ImmutableSet.of()), CalciteTests.createJoinableFactoryWrapper(), catalogResolver, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); sqlStatementFactory = CalciteTests.createSqlStatementFactory(engine, plannerFactory); diff --git a/extensions-core/stats/pom.xml b/extensions-core/stats/pom.xml index 1b3a531dc42..048ad964bf8 100644 --- a/extensions-core/stats/pom.xml +++ b/extensions-core/stats/pom.xml @@ -123,7 +123,11 @@ junit-vintage-engine test - + + org.reflections + reflections + test + org.apache.druid druid-processing diff --git a/extensions-core/testing-tools/pom.xml b/extensions-core/testing-tools/pom.xml index 086d32559d3..fd0eed880c2 100644 --- a/extensions-core/testing-tools/pom.xml +++ b/extensions-core/testing-tools/pom.xml @@ -145,6 +145,11 @@ junit-jupiter-params test + + org.reflections + reflections + test + org.junit.vintage junit-vintage-engine diff --git a/pom.xml b/pom.xml index ea9c3dcfa17..11738830512 100644 --- a/pom.xml +++ b/pom.xml @@ -242,6 +242,7 @@ integration-tests-ex/tools integration-tests-ex/image integration-tests-ex/cases + quidem-ut diff --git a/quidem-ut/README.md b/quidem-ut/README.md new file mode 100644 index 00000000000..5b594a3c8f4 --- /dev/null +++ b/quidem-ut/README.md @@ -0,0 +1,74 @@ + + +# Quidem UT + +Enables writing SQL-level tests easily. +Can be used to write tests against existing test backends (ComponentSupplier) - by doing so the test cases can be moved closer to the exercised code.
+ +## Usage + +### Install Java & Maven (if needed) + +If you don't have Java and Maven installed, one way to set them up is by using sdkman like this: +```bash +# install sdkman +curl -s "https://get.sdkman.io" | bash +# at the end of installation either open a new terminal; or follow the instructions at the end + +# install java & maven +sdk install java 11.0.23-zulu +sdk install maven + +# run mvn to see if it works +mvn --version + +# download druid sources (FIXME: change this to the main repo/branch before merging) +git clone --branch quidem-record https://github.com/kgyrtkirk/druid +``` + + + + +### Running these tests + +* CI execution happens via the standard JUnit test `QTest` in this module +* the `dev/quidem` script can be used to run these tests (after the project is built) + +### Launching a test-generating broker + +* make sure to build the project first; one way to do that is: + ```bash + mvn install -pl quidem-ut/ -am -DskipTests -Pskip-static-checks + ``` +* launch the broker instance with: + ```bash + mvn exec:exec -pl quidem-ut -Dquidem.record.autostart=true + ``` + * the broker will be running at http://localhost:12345 + * the test configuration backend in use can be configured by supplying `quidem.uri` + ```bash + mvn exec:exec -pl quidem-ut -Dquidem.uri=druidtest:///?componentSupplier=ThetaSketchComponentSupplier + ``` + * new record files can be started by calling http://localhost:12345/quidem/start + * if `quidem.record.autostart` is omitted, recording will not start +* after you are finished using the broker, a new `record-123.iq` file will be in the project's worktree - it will contain all the executed statements + * running `dev/quidem -Dquidem.overwrite` updates the result sets of all `iq` files in that directory + * rename the test file to give it a more descriptive name + diff --git a/quidem-ut/pom.xml b/quidem-ut/pom.xml new file mode 100644 index 00000000000..ac48af25c17 --- /dev/null +++ b/quidem-ut/pom.xml @@ -0,0 +1,546 @@ + + + + + 4.0.0 + + druid-quidem-ut + druid-quidem-ut + druid-quidem-ut + + + org.apache.druid + druid + 31.0.0-SNAPSHOT + + + + + confluent + https://packages.confluent.io/maven/ + + + + + druidtest:/// + false + + + + com.amazonaws + amazon-kinesis-producer + 0.13.1 + + + org.apache.avro + avro + + + com.opencsv + opencsv + + + org.apache.commons + commons-lang3 + + + org.apache.httpcomponents + httpcore + + + org.apache.httpcomponents + httpclient + + + com.github.rvesse + airline + + + com.amazonaws + aws-java-sdk-kinesis + ${aws.sdk.version} + + + + com.amazonaws + aws-java-sdk-core + + + + + com.amazonaws + aws-java-sdk-core + ${aws.sdk.version} + + + commons-codec + commons-codec + + + com.amazonaws + aws-java-sdk-s3 + ${aws.sdk.version} + + + org.apache.druid.extensions + druid-orc-extensions + ${project.parent.version} + + + javax.servlet + servlet-api + + + com.squareup.okhttp + okhttp + + + + + org.apache.druid.extensions + druid-parquet-extensions + ${project.parent.version} + + + org.apache.druid.extensions + druid-avro-extensions + ${project.parent.version} + + + org.apache.druid.extensions + druid-protobuf-extensions + ${project.parent.version} + + + org.apache.druid.extensions + druid-s3-extensions + ${project.parent.version} + + + org.apache.druid.extensions + druid-kinesis-indexing-service + ${project.parent.version} + + + org.apache.druid.extensions + druid-azure-extensions + ${project.parent.version} + + + org.apache.druid.extensions + druid-google-extensions + ${project.parent.version} + + + org.apache.druid.extensions + druid-hdfs-storage + 
${project.parent.version} + + + com.amazonaws + aws-java-sdk-bundle + + + + + org.apache.druid.extensions + druid-datasketches + ${project.parent.version} + + + org.apache.druid.extensions + druid-histogram + ${project.parent.version} + + + org.apache.druid + druid-aws-common + ${project.parent.version} + + + org.apache.druid + druid-processing + ${project.parent.version} + + + org.apache.druid + druid-sql + ${project.parent.version} + + + org.apache.druid + druid-indexing-service + ${project.parent.version} + + + org.apache.druid.extensions + mysql-metadata-storage + ${project.parent.version} + + + org.apache.druid.extensions + druid-kafka-indexing-service + ${project.parent.version} + + + org.apache.kafka + kafka-clients + + + + + org.apache.druid.extensions + druid-basic-security + ${project.parent.version} + + + org.apache.druid.extensions + druid-lookups-cached-global + ${project.parent.version} + + + org.apache.druid.extensions + druid-testing-tools + ${project.parent.version} + + + org.apache.druid.extensions + simple-client-sslcontext + ${project.parent.version} + + + org.apache.druid.extensions + druid-multi-stage-query + ${project.parent.version} + + + org.apache.druid + druid-services + ${project.parent.version} + + + org.apache.druid + druid-server + ${project.parent.version} + + + commons-io + commons-io + + + joda-time + joda-time + + + com.fasterxml.jackson.core + jackson-annotations + + + com.google.inject + guice + + + com.google.inject.extensions + guice-multibindings + + + com.fasterxml.jackson.core + jackson-databind + + + org.apache.kafka + kafka-clients + ${apache.kafka.version} + + + javax.ws.rs + jsr311-api + + + io.netty + netty + + + com.fasterxml.jackson.core + jackson-core + + + com.fasterxml.jackson.jaxrs + jackson-jaxrs-smile-provider + + + com.google.guava + guava + + + jakarta.validation + jakarta.validation-api + + + jakarta.inject + jakarta.inject-api + + + org.apache.logging.log4j + log4j-slf4j-impl + + + com.google.code.findbugs + jsr305 + + + com.github.docker-java + docker-java + + + com.github.docker-java + docker-java-transport-jersey + + + + + com.github.docker-java + docker-java-transport-netty + + + com.github.docker-java + docker-java-api + + + com.github.docker-java + docker-java-core + + + net.hydromatic + quidem + test + + + io.netty + netty-transport-native-kqueue + osx-x86_64 + runtime + + + io.netty + netty-transport-native-kqueue + osx-aarch_64 + runtime + + + org.eclipse.jetty + jetty-servlet + ${jetty.version} + + + org.eclipse.jetty + jetty-server + ${jetty.version} + + + com.google.inject.extensions + guice-servlet + + + io.confluent + kafka-schema-registry-client + 6.2.12 + + + org.slf4j + slf4j-log4j12 + + + org.apache.avro + avro + + + com.fasterxml.jackson.core + jackson-databind + + + javax.ws.rs + javax.ws.rs-api + + + javax.ws.rs + javax.ws.rs-api + + + javax.ws.rs + jsr311-api + + + jakarta.ws.rs + jakarta.ws.rs-api + + + + + io.confluent + kafka-protobuf-provider + 6.2.12 + + + com.google.protobuf + protobuf-java + + + + org.apache.druid + druid-sql + test-jar + ${project.parent.version} + + + + org.apache.druid + druid-processing + test-jar + ${project.parent.version} + + + org.apache.druid + druid-server + test-jar + ${project.parent.version} + + + + org.apache.druid.extensions + druid-datasketches + test-jar + ${project.parent.version} + compile + + + + org.slf4j + slf4j-simple + true + + + + org.easymock + easymock + compile + + + org.reflections + reflections + compile + + + + + + org.junit.jupiter + junit-jupiter + + + 
org.junit.vintage + junit-vintage-engine + test + + + org.junit.jupiter + junit-jupiter-api + + + junit + junit + + + org.apache.calcite.avatica + avatica + + + org.apache.calcite.avatica + avatica-core + + + org.apache.calcite + calcite-testkit + test + + + com.github.os72 + protobuf-dynamic + 0.9.3 + compile + + + com.amazonaws + aws-java-sdk-sts + ${aws.sdk.version} + runtime + + + org.apache.datasketches + datasketches-java + runtime + + + org.hamcrest + hamcrest-all + compile + + + org.hamcrest + hamcrest-core + compile + + + + + + + org.owasp + dependency-check-maven + + true + + + + org.apache.maven.plugins + maven-dependency-plugin + + true + + + + org.codehaus.mojo + exec-maven-plugin + + java + + -classpath + + -Dquidem.uri=${quidem.uri} + -Dquidem.record.autostart=${quidem.record.autostart} + org.apache.druid.quidem.Launcher + + + + + + diff --git a/quidem-ut/src/main/java/org/apache/druid/quidem/ExposedAsBrokerQueryComponentSupplierWrapper.java b/quidem-ut/src/main/java/org/apache/druid/quidem/ExposedAsBrokerQueryComponentSupplierWrapper.java new file mode 100644 index 00000000000..f853dd317d1 --- /dev/null +++ b/quidem-ut/src/main/java/org/apache/druid/quidem/ExposedAsBrokerQueryComponentSupplierWrapper.java @@ -0,0 +1,370 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.quidem; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.google.inject.AbstractModule; +import com.google.inject.Injector; +import com.google.inject.Key; +import com.google.inject.Module; +import com.google.inject.Provides; +import com.google.inject.name.Names; +import org.apache.druid.cli.CliBroker; +import org.apache.druid.cli.QueryJettyServerInitializer; +import org.apache.druid.client.BrokerSegmentWatcherConfig; +import org.apache.druid.client.BrokerServerView; +import org.apache.druid.client.InternalQueryConfig; +import org.apache.druid.client.TimelineServerView; +import org.apache.druid.client.selector.CustomTierSelectorStrategyConfig; +import org.apache.druid.client.selector.ServerSelectorStrategy; +import org.apache.druid.client.selector.TierSelectorStrategy; +import org.apache.druid.curator.CuratorModule; +import org.apache.druid.curator.discovery.DiscoveryModule; +import org.apache.druid.discovery.DruidNodeDiscoveryProvider; +import org.apache.druid.guice.AnnouncerModule; +import org.apache.druid.guice.BrokerProcessingModule; +import org.apache.druid.guice.BrokerServiceModule; +import org.apache.druid.guice.BuiltInTypesModule; +import org.apache.druid.guice.CoordinatorDiscoveryModule; +import org.apache.druid.guice.DruidInjectorBuilder; +import org.apache.druid.guice.ExpressionModule; +import org.apache.druid.guice.ExtensionsModule; +import org.apache.druid.guice.JacksonConfigManagerModule; +import org.apache.druid.guice.JavaScriptModule; +import org.apache.druid.guice.Jerseys; +import org.apache.druid.guice.JoinableFactoryModule; +import org.apache.druid.guice.JsonConfigProvider; +import org.apache.druid.guice.LazySingleton; +import org.apache.druid.guice.LifecycleModule; +import org.apache.druid.guice.LocalDataStorageDruidModule; +import org.apache.druid.guice.MetadataConfigModule; +import org.apache.druid.guice.QueryRunnerFactoryModule; +import org.apache.druid.guice.SegmentWranglerModule; +import org.apache.druid.guice.ServerModule; +import org.apache.druid.guice.ServerTypeConfig; +import org.apache.druid.guice.ServerViewModule; +import org.apache.druid.guice.StartupLoggingModule; +import org.apache.druid.guice.StorageNodeModule; +import org.apache.druid.guice.annotations.Client; +import org.apache.druid.guice.annotations.EscalatedClient; +import org.apache.druid.guice.annotations.Json; +import org.apache.druid.guice.http.HttpClientModule; +import org.apache.druid.guice.security.AuthenticatorModule; +import org.apache.druid.guice.security.AuthorizerModule; +import org.apache.druid.guice.security.DruidAuthModule; +import org.apache.druid.initialization.CoreInjectorBuilder; +import org.apache.druid.initialization.Log4jShutterDownerModule; +import org.apache.druid.initialization.ServerInjectorBuilder; +import org.apache.druid.initialization.TombstoneDataStorageModule; +import org.apache.druid.java.util.common.io.Closer; +import org.apache.druid.metadata.storage.derby.DerbyMetadataStorageDruidModule; +import org.apache.druid.query.QueryRunnerFactoryConglomerate; +import org.apache.druid.query.QuerySegmentWalker; +import org.apache.druid.query.RetryQueryRunnerConfig; +import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider; +import org.apache.druid.rpc.guice.ServiceClientModule; +import org.apache.druid.segment.join.JoinableFactoryWrapper; +import org.apache.druid.segment.writeout.SegmentWriteOutMediumModule; 
+import org.apache.druid.server.BrokerQueryResource; +import org.apache.druid.server.ClientInfoResource; +import org.apache.druid.server.DruidNode; +import org.apache.druid.server.QueryLifecycleFactory; +import org.apache.druid.server.QuerySchedulerProvider; +import org.apache.druid.server.ResponseContextConfig; +import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; +import org.apache.druid.server.SubqueryGuardrailHelper; +import org.apache.druid.server.SubqueryGuardrailHelperProvider; +import org.apache.druid.server.coordination.ServerType; +import org.apache.druid.server.emitter.EmitterModule; +import org.apache.druid.server.http.BrokerResource; +import org.apache.druid.server.http.SelfDiscoveryResource; +import org.apache.druid.server.initialization.AuthorizerMapperModule; +import org.apache.druid.server.initialization.ExternalStorageAccessSecurityModule; +import org.apache.druid.server.initialization.jetty.JettyServerInitializer; +import org.apache.druid.server.initialization.jetty.JettyServerModule; +import org.apache.druid.server.log.NoopRequestLogger; +import org.apache.druid.server.log.RequestLogger; +import org.apache.druid.server.metrics.QueryCountStatsProvider; +import org.apache.druid.server.metrics.SubqueryCountStatsProvider; +import org.apache.druid.server.router.TieredBrokerConfig; +import org.apache.druid.server.security.AuthenticatorMapper; +import org.apache.druid.server.security.Escalator; +import org.apache.druid.server.security.TLSCertificateCheckerModule; +import org.apache.druid.sql.calcite.planner.CalciteRulesManager; +import org.apache.druid.sql.calcite.planner.CatalogResolver; +import org.apache.druid.sql.calcite.run.NativeSqlEngine; +import org.apache.druid.sql.calcite.run.SqlEngine; +import org.apache.druid.sql.calcite.schema.BrokerSegmentMetadataCache; +import org.apache.druid.sql.calcite.schema.DruidSchemaName; +import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.calcite.util.SqlTestFramework; +import org.apache.druid.sql.calcite.util.SqlTestFramework.Builder; +import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerComponentSupplier; +import org.apache.druid.sql.calcite.util.SqlTestFramework.QueryComponentSupplier; +import org.apache.druid.sql.guice.SqlModule; +import org.apache.druid.storage.StorageConnectorModule; +import org.apache.druid.timeline.PruneLoadSpec; +import org.eclipse.jetty.server.Server; + +import java.io.IOException; +import java.util.List; +import java.util.Properties; + +/** + * A wrapper class to expose a {@link QueryComponentSupplier} as a Broker service. 
+ */ +public class ExposedAsBrokerQueryComponentSupplierWrapper implements QueryComponentSupplier +{ + private QueryComponentSupplier delegate; + + public ExposedAsBrokerQueryComponentSupplierWrapper(QueryComponentSupplier delegate) + { + this.delegate = delegate; + } + + @Override + public void gatherProperties(Properties properties) + { + delegate.gatherProperties(properties); + } + + @Override + public void configureGuice(DruidInjectorBuilder builder) + { + } + + @Override + public void configureGuice(CoreInjectorBuilder builder, List overrideModules) + { + delegate.configureGuice(builder); + + installForServerModules(builder); + + overrideModules.addAll(ExposedAsBrokerQueryComponentSupplierWrapper.brokerModules()); + overrideModules.add(new BrokerTestModule()); + builder.add(QuidemCaptureModule.class); + } + + @Override + public QueryRunnerFactoryConglomerate createCongolmerate(Builder builder, Closer closer, ObjectMapper om) + { + return delegate.createCongolmerate(builder, closer, om); + } + + @Override + public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(QueryRunnerFactoryConglomerate conglomerate, + JoinableFactoryWrapper joinableFactory, Injector injector) + { + return delegate.createQuerySegmentWalker(conglomerate, joinableFactory, injector); + } + + @Override + public SqlEngine createEngine(QueryLifecycleFactory qlf, ObjectMapper objectMapper, Injector injector) + { + return delegate.createEngine(qlf, objectMapper, injector); + } + + @Override + public void configureJsonMapper(ObjectMapper mapper) + { + delegate.configureJsonMapper(mapper); + } + + @Override + public JoinableFactoryWrapper createJoinableFactoryWrapper(LookupExtractorFactoryContainerProvider lookupProvider) + { + return delegate.createJoinableFactoryWrapper(lookupProvider); + } + + @Override + public void finalizeTestFramework(SqlTestFramework sqlTestFramework) + { + delegate.finalizeTestFramework(sqlTestFramework); + } + + @Override + public void close() throws IOException + { + delegate.close(); + } + + @Override + public PlannerComponentSupplier getPlannerComponentSupplier() + { + return delegate.getPlannerComponentSupplier(); + } + + public static class BrokerTestModule extends AbstractModule + { + @Override + protected void configure() + { + bind(AuthenticatorMapper.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_MAPPER); + bind(Escalator.class).toInstance(CalciteTests.TEST_AUTHENTICATOR_ESCALATOR); + bind(RequestLogger.class).toInstance(new NoopRequestLogger()); + bind(String.class) + .annotatedWith(DruidSchemaName.class) + .toInstance(CalciteTests.DRUID_SCHEMA_NAME); + bind(QuerySchedulerProvider.class).in(LazySingleton.class); + bind(CalciteRulesManager.class).toInstance(new CalciteRulesManager(ImmutableSet.of())); + bind(CatalogResolver.class).toInstance(CatalogResolver.NULL_RESOLVER); + } + + @Provides + @LazySingleton + public BrokerSegmentMetadataCache provideCache() + { + return null; + } + + @Provides + @LazySingleton + public Properties getProps() + { + Properties localProps = new Properties(); + localProps.put("druid.enableTlsPort", "false"); + localProps.put("druid.zk.service.enabled", "false"); + localProps.put("druid.plaintextPort", "12345"); + localProps.put("druid.host", "localhost"); + localProps.put("druid.broker.segment.awaitInitializationOnStart", "false"); + return localProps; + } + + @Provides + @LazySingleton + public SqlEngine createMockSqlEngine( + final QuerySegmentWalker walker, + final QueryRunnerFactoryConglomerate conglomerate, + @Json ObjectMapper jsonMapper) 
+ { + return new NativeSqlEngine(CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate), jsonMapper); + } + + @Provides + @LazySingleton + DruidNodeDiscoveryProvider getDruidNodeDiscoveryProvider() + { + final DruidNode coordinatorNode = CalciteTests.mockCoordinatorNode(); + return CalciteTests.mockDruidNodeDiscoveryProvider(coordinatorNode); + } + } + + /** + * Closely related to {@link CoreInjectorBuilder#forServer()} + */ + private void installForServerModules(CoreInjectorBuilder builder) + { + + builder.add( + new Log4jShutterDownerModule(), + new LifecycleModule(), + ExtensionsModule.SecondaryModule.class, + new DruidAuthModule(), + TLSCertificateCheckerModule.class, + EmitterModule.class, + HttpClientModule.global(), + HttpClientModule.escalatedGlobal(), + new HttpClientModule("druid.broker.http", Client.class, true), + new HttpClientModule("druid.broker.http", EscalatedClient.class, true), + new CuratorModule(), + new AnnouncerModule(), + new SegmentWriteOutMediumModule(), + new ServerModule(), + new StorageNodeModule(), + new JettyServerModule(), + new ExpressionModule(), + new BuiltInTypesModule(), + new DiscoveryModule(), + new ServerViewModule(), + new MetadataConfigModule(), + new DerbyMetadataStorageDruidModule(), + new JacksonConfigManagerModule(), + new CoordinatorDiscoveryModule(), + new LocalDataStorageDruidModule(), + new TombstoneDataStorageModule(), + new JavaScriptModule(), + new AuthenticatorModule(), + new AuthorizerModule(), + new AuthorizerMapperModule(), + new StartupLoggingModule(), + new ExternalStorageAccessSecurityModule(), + new ServiceClientModule(), + new StorageConnectorModule(), + new SqlModule(), + ServerInjectorBuilder.registerNodeRoleModule(ImmutableSet.of()) + ); + } + + /** + * Closely related to {@link CliBroker#getModules}. 
+ */ + static List brokerModules() + { + return ImmutableList.of( + new BrokerProcessingModule(), + new QueryRunnerFactoryModule(), + new SegmentWranglerModule(), + new JoinableFactoryModule(), + new BrokerServiceModule(), + binder -> { + + binder.bindConstant().annotatedWith(Names.named("serviceName")).to( + TieredBrokerConfig.DEFAULT_BROKER_SERVICE_NAME + ); + binder.bindConstant().annotatedWith(Names.named("servicePort")).to(8082); + binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(8282); + binder.bindConstant().annotatedWith(PruneLoadSpec.class).to(true); + binder.bind(ResponseContextConfig.class).toInstance(ResponseContextConfig.newConfig(false)); + + binder.bind(TimelineServerView.class).to(BrokerServerView.class).in(LazySingleton.class); + + JsonConfigProvider.bind(binder, "druid.broker.select", TierSelectorStrategy.class); + JsonConfigProvider.bind(binder, "druid.broker.select.tier.custom", CustomTierSelectorStrategyConfig.class); + JsonConfigProvider.bind(binder, "druid.broker.balancer", ServerSelectorStrategy.class); + JsonConfigProvider.bind(binder, "druid.broker.retryPolicy", RetryQueryRunnerConfig.class); + JsonConfigProvider.bind(binder, "druid.broker.segment", BrokerSegmentWatcherConfig.class); + JsonConfigProvider.bind(binder, "druid.broker.internal.query.config", InternalQueryConfig.class); + binder.bind(JettyServerInitializer.class).to(QueryJettyServerInitializer.class).in(LazySingleton.class); + + binder.bind(BrokerQueryResource.class).in(LazySingleton.class); + Jerseys.addResource(binder, BrokerQueryResource.class); + binder.bind(SubqueryGuardrailHelper.class).toProvider(SubqueryGuardrailHelperProvider.class); + binder.bind(QueryCountStatsProvider.class).to(BrokerQueryResource.class).in(LazySingleton.class); + binder.bind(SubqueryCountStatsProvider.class).toInstance(new SubqueryCountStatsProvider()); + Jerseys.addResource(binder, BrokerResource.class); + Jerseys.addResource(binder, ClientInfoResource.class); + + LifecycleModule.register(binder, BrokerQueryResource.class); + + LifecycleModule.register(binder, Server.class); + binder.bind(ServerTypeConfig.class).toInstance(new ServerTypeConfig(ServerType.BROKER)); + + binder.bind(String.class) + .annotatedWith(DruidSchemaName.class) + .toInstance(CalciteTests.DRUID_SCHEMA_NAME); + + Jerseys.addResource(binder, SelfDiscoveryResource.class); + LifecycleModule.registerKey(binder, Key.get(SelfDiscoveryResource.class)); + } + ); + } +} diff --git a/quidem-ut/src/main/java/org/apache/druid/quidem/Launcher.java b/quidem-ut/src/main/java/org/apache/druid/quidem/Launcher.java new file mode 100644 index 00000000000..b185084e0a7 --- /dev/null +++ b/quidem-ut/src/main/java/org/apache/druid/quidem/Launcher.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.quidem; + +import com.google.common.base.Stopwatch; +import org.apache.druid.cli.GuiceRunnable; +import org.apache.druid.java.util.common.lifecycle.Lifecycle; +import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.sql.calcite.SqlTestFrameworkConfig; +import org.apache.druid.sql.calcite.SqlTestFrameworkConfig.ConfigurationInstance; +import org.apache.druid.sql.calcite.SqlTestFrameworkConfig.SqlTestFrameworkConfigStore; +import org.apache.druid.sql.calcite.util.SqlTestFramework; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; + +import java.util.Map.Entry; +import java.util.Properties; +import java.util.concurrent.TimeUnit; + +public class Launcher +{ + static final SqlTestFrameworkConfigStore CONFIG_STORE = new SqlTestFrameworkConfigStore( + x -> new ExposedAsBrokerQueryComponentSupplierWrapper(x) + ); + private static final String QUIDEM_URI = "quidem.uri"; + private static Logger log = new Logger(Launcher.class); + private final SqlTestFramework framework; + private final ConfigurationInstance configurationInstance; + private Lifecycle lifecycle; + + public Launcher(String uri) throws Exception + { + SqlTestFrameworkConfig config = SqlTestFrameworkConfig.fromURL(uri); + configurationInstance = CONFIG_STORE.getConfigurationInstance(config); + framework = configurationInstance.framework; + } + + public void start() throws Exception + { + lifecycle = GuiceRunnable.initLifecycle(framework.injector(), log); + if (withAutoStart()) { + callQuidemStart(); + } + } + + private void callQuidemStart() throws Exception + { + CloseableHttpClient client = HttpClients.createDefault(); + HttpGet request = new HttpGet("http://localhost:12345/quidem/start"); + request.addHeader("Content-Type", "application/json"); + client.execute(request); + } + + private boolean withAutoStart() + { + return Boolean.valueOf(System.getProperty("quidem.record.autostart", "false")); + } + + public void shutdown() + { + lifecycle.stop(); + } + + public static void main(String[] args) throws Exception + { + String quidemUri = System.getProperty(QUIDEM_URI, "druidtest:///"); + Properties p = System.getProperties(); + for (Entry entry : p.entrySet()) { + Object key = entry.getKey(); + if (key.toString().startsWith("quidem")) { + log.info("[%s] -> %s", key, entry.getValue()); + } + } + log.info("Starting Quidem with URI[%s]", quidemUri); + Stopwatch stopwatch = Stopwatch.createStarted(); + Launcher launcher = new Launcher(quidemUri); + log.info("Framework creation time: %d ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); + launcher.start(); + log.info("Total time to launch: %d ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); + launcher.lifecycle.join(); + } +} diff --git a/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemCaptureModule.java b/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemCaptureModule.java new file mode 100644 index 00000000000..17d9009994e --- /dev/null +++ b/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemCaptureModule.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.quidem; + +import com.google.inject.Binder; +import com.google.inject.Module; +import org.apache.druid.guice.Jerseys; + +public class QuidemCaptureModule implements Module +{ + + @Override + public void configure(Binder binder) + { + Jerseys.addResource(binder, QuidemCaptureResource.class); + } +} diff --git a/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemCaptureResource.java b/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemCaptureResource.java new file mode 100644 index 00000000000..68d2f31ac81 --- /dev/null +++ b/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemCaptureResource.java @@ -0,0 +1,81 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.quidem; + +import com.google.inject.Inject; +import org.apache.druid.guice.LazySingleton; +import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.sql.hook.DruidHookDispatcher; + +import javax.inject.Named; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +import java.io.File; +import java.net.URI; + +@Path("/quidem") +@LazySingleton +public class QuidemCaptureResource +{ + public static final File RECORD_PATH = ProjectPathUtils + .getPathFromProjectRoot("quidem-ut/src/test/quidem/org.apache.druid.quidem.QTest"); + private URI quidemURI; + private QuidemRecorder recorder = null; + private DruidHookDispatcher hookDispatcher; + + @Inject + public QuidemCaptureResource(@Named("quidem") URI quidemURI, DruidHookDispatcher hookDispatcher) + { + this.quidemURI = quidemURI; + this.hookDispatcher = hookDispatcher; + } + + @GET + @Path("/start") + @Produces(MediaType.TEXT_PLAIN) + public synchronized String start() + { + stopIfRunning(); + recorder = new QuidemRecorder( + quidemURI, + hookDispatcher, + genRecordFilePath() + ); + return recorder.toString(); + } + + private File genRecordFilePath() + { + String fileName = StringUtils.format("record-%d.iq", System.currentTimeMillis()); + return new File(RECORD_PATH, fileName); + } + + private synchronized void stopIfRunning() + { + if (recorder != null) { + recorder.close(); + recorder = null; + } + + } +} diff --git a/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemRecorder.java b/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemRecorder.java new file mode 100644 index 00000000000..5f2d6a192ed --- /dev/null +++ b/quidem-ut/src/main/java/org/apache/druid/quidem/QuidemRecorder.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.quidem; + +import org.apache.druid.sql.hook.DruidHook; +import org.apache.druid.sql.hook.DruidHookDispatcher; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.PrintStream; +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.nio.charset.StandardCharsets; +import java.util.Date; + +public class QuidemRecorder implements AutoCloseable, DruidHook +{ + private PrintStream printStream; + private File file; + private DruidHookDispatcher hookDispatcher; + + public QuidemRecorder(URI quidemURI, DruidHookDispatcher hookDispatcher, File file) + { + this.hookDispatcher = hookDispatcher; + this.file = file; + try { + this.printStream = new PrintStream(new FileOutputStream(file), true, StandardCharsets.UTF_8.name()); + } + catch (UnsupportedEncodingException | FileNotFoundException e) { + throw new RuntimeException(e); + } + printStream.println("#started " + new Date()); + printStream.println("!use " + quidemURI); + printStream.println("!set outputformat mysql"); + hookDispatcher.register(DruidHook.SQL, this); + } + + @Override + public synchronized void close() + { + if (printStream != null) { + printStream.close(); + printStream = null; + } + hookDispatcher.unregister(DruidHook.SQL, this); + } + + @Override + public synchronized void invoke(HookKey key, String query) + { + if (DruidHook.SQL.equals(key)) { + printStream.println("# " + new Date()); + printStream.print(query); + printStream.println(";"); + printStream.println("!ok"); + printStream.flush(); + return; + } + } + + @Override + public String toString() + { + return "QuidemRecorder [file=" + file + "]"; + } + +} diff --git a/quidem-ut/src/test/java/org/apache/druid/quidem/LauncherSmokeTest.java b/quidem-ut/src/test/java/org/apache/druid/quidem/LauncherSmokeTest.java new file mode 100644 index 00000000000..6402cd63e43 --- /dev/null +++ b/quidem-ut/src/test/java/org/apache/druid/quidem/LauncherSmokeTest.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.quidem; + +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +public class LauncherSmokeTest +{ + private static Launcher launcher; + + @BeforeClass + public static void setUp() throws Exception + { + launcher = new Launcher("druidtest:///"); + launcher.start(); + } + + @AfterClass + public static void tearDown() + { + launcher.shutdown(); + } + + @Test + public void chkSelectFromFoo() throws Exception + { + CloseableHttpClient client = HttpClients.createDefault(); + HttpPost request = new HttpPost("http://localhost:12345/druid/v2/sql"); + request.addHeader("Content-Type", "application/json"); + request.setEntity(new StringEntity("{\"query\":\"Select * from foo\"}")); + CloseableHttpResponse response = client.execute(request); + assertEquals(200, response.getStatusLine().getStatusCode()); + } + + @Test + public void chkStatusWorks() throws Exception + { + CloseableHttpClient client = HttpClients.createDefault(); + HttpGet request = new HttpGet("http://localhost:12345/status"); + request.addHeader("Content-Type", "application/json"); + CloseableHttpResponse response = client.execute(request); + assertEquals(200, response.getStatusLine().getStatusCode()); + String responseStr = EntityUtils.toString(response.getEntity()); + MatcherAssert.assertThat(responseStr, Matchers.containsString("\"version\":\"")); + } +} diff --git a/quidem-ut/src/test/java/org/apache/druid/quidem/QTest.java b/quidem-ut/src/test/java/org/apache/druid/quidem/QTest.java new file mode 100644 index 00000000000..83167a18a16 --- /dev/null +++ b/quidem-ut/src/test/java/org/apache/druid/quidem/QTest.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.quidem; + +import org.junit.jupiter.api.Test; + +import java.io.File; +import java.io.IOException; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + +public class QTest extends DruidQuidemTestBase +{ + public QTest() + { + super(); + } + + @Override + protected File getTestRoot() + { + return ProjectPathUtils.getPathFromProjectRoot("quidem-ut/src/test/quidem/" + getClass().getName()); + } + + @Test + public void ensureNoRecordFilesPresent() throws IOException + { + // ensure that the captured ones are saved into this test's input path + assertEquals(QuidemCaptureResource.RECORD_PATH, getTestRoot()); + for (String name : getFileNames()) { + if (name.startsWith("record-")) { + fail("Record file found: " + name); + } + } + } +} diff --git a/quidem-ut/src/test/quidem/org.apache.druid.quidem.QTest/example.iq b/quidem-ut/src/test/quidem/org.apache.druid.quidem.QTest/example.iq new file mode 100644 index 00000000000..3a8105927ee --- /dev/null +++ b/quidem-ut/src/test/quidem/org.apache.druid.quidem.QTest/example.iq @@ -0,0 +1,46 @@ +#started +!use druidtest:/// +!set outputformat mysql + +select * from numfoo; ++-------------------------+------+------+-----------+------+------+------+-----+-----+-----+-----+--------+--------+-----+-----+-----+--------------------+ +| __time | dim1 | dim2 | dim3 | dim4 | dim5 | dim6 | d1 | d2 | f1 | f2 | l1 | l2 | cnt | m1 | m2 | unique_dim1 | ++-------------------------+------+------+-----------+------+------+------+-----+-----+-----+-----+--------+--------+-----+-----+-----+--------------------+ +| 2000-01-01 00:00:00.000 | | a | ["a","b"] | a | aa | 1 | 1.0 | | 1.0 | | 7 | | 1 | 1.0 | 1.0 | "AQAAAEAAAA==" | +| 2000-01-02 00:00:00.000 | 10.1 | | ["b","c"] | a | ab | 2 | 1.7 | 1.7 | 0.1 | 0.1 | 325323 | 325323 | 1 | 2.0 | 2.0 | "AQAAAQAAAAHNBA==" | +| 2000-01-03 00:00:00.000 | 2 | | d | a | ba | 3 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | 0 | 1 | 3.0 | 3.0 | "AQAAAQAAAAOzAg==" | +| 2001-01-01 00:00:00.000 | 1 | a | | b | ad | 4 | | | | | | | 1 | 4.0 | 4.0 | "AQAAAQAAAAFREA==" | +| 2001-01-02 00:00:00.000 | def | abc | | b | aa | 5 | | | | | | | 1 | 5.0 | 5.0 | "AQAAAQAAAACyEA==" | +| 2001-01-03 00:00:00.000 | abc | | | b | ab | 6 | | | | | | | 1 | 6.0 | 6.0 | "AQAAAQAAAAEkAQ==" | ++-------------------------+------+------+-----------+------+------+------+-----+-----+-----+-----+--------+--------+-----+-----+-----+--------------------+ +(6 rows) + +!ok +select * from numfoo; ++-------------------------+------+------+-----------+------+------+------+-----+-----+-----+-----+--------+--------+-----+-----+-----+--------------------+ +| __time | dim1 | dim2 | dim3 | dim4 | dim5 | dim6 | d1 | d2 | f1 | f2 | l1 | l2 | cnt | m1 | m2 | unique_dim1 | ++-------------------------+------+------+-----------+------+------+------+-----+-----+-----+-----+--------+--------+-----+-----+-----+--------------------+ +| 2000-01-01 00:00:00.000 | | a | ["a","b"] | a | aa | 1 | 1.0 | | 1.0 | | 7 | | 1 | 1.0 | 1.0 | "AQAAAEAAAA==" | +| 2000-01-02 00:00:00.000 | 10.1 | | ["b","c"] | a | ab | 2 | 1.7 | 1.7 | 0.1 | 0.1 | 325323 | 325323 | 1 | 2.0 | 2.0 | "AQAAAQAAAAHNBA==" | +| 2000-01-03 00:00:00.000 | 2 | | d | a | ba | 3 | 0.0 | 0.0 | 0.0 | 0.0 | 0 | 0 | 1 | 3.0 | 3.0 | "AQAAAQAAAAOzAg==" | +| 2001-01-01 00:00:00.000 | 1 | a | | b | ad | 4 | | | | | | | 1 | 4.0 | 4.0 | "AQAAAQAAAAFREA==" | +| 2001-01-02 00:00:00.000 | def | abc | | b | aa | 5 | | | | | | | 1 | 5.0 | 5.0 | "AQAAAQAAAACyEA==" | +| 
2001-01-03 00:00:00.000 | abc | | | b | ab | 6 | | | | | | | 1 | 6.0 | 6.0 | "AQAAAQAAAAEkAQ==" | ++-------------------------+------+------+-----------+------+------+------+-----+-----+-----+-----+--------+--------+-----+-----+-----+--------------------+ +(6 rows) + +!ok +select length(dim1) from numfoo; ++--------+ +| EXPR$0 | ++--------+ +| 0 | +| 1 | +| 1 | +| 3 | +| 3 | +| 4 | ++--------+ +(6 rows) + +!ok diff --git a/server/src/main/java/org/apache/druid/server/StatusResource.java b/server/src/main/java/org/apache/druid/server/StatusResource.java index e15ddaf43bd..cf0ed113c47 100644 --- a/server/src/main/java/org/apache/druid/server/StatusResource.java +++ b/server/src/main/java/org/apache/druid/server/StatusResource.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Maps; import com.sun.jersey.spi.container.ResourceFilters; import org.apache.druid.client.DruidServerConfig; +import org.apache.druid.common.guava.GuavaUtils; import org.apache.druid.guice.ExtensionsLoader; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.StringUtils; @@ -144,7 +145,7 @@ public class StatusResource private String getDruidVersion() { - return Status.class.getPackage().getImplementationVersion(); + return GuavaUtils.firstNonNull(Status.class.getPackage().getImplementationVersion(), "unknown"); } @JsonProperty diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java index 8d3b6a743e3..c42c237ee36 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java @@ -40,7 +40,6 @@ import org.apache.calcite.rel.rules.DateRangeRules; import org.apache.calcite.rel.rules.JoinPushThroughJoinRule; import org.apache.calcite.rel.rules.ProjectMergeRule; import org.apache.calcite.rel.rules.PruneEmptyRules; -import org.apache.calcite.runtime.Hook; import org.apache.calcite.sql.SqlExplainFormat; import org.apache.calcite.sql.SqlExplainLevel; import org.apache.calcite.sql2rel.RelDecorrelator; @@ -70,6 +69,7 @@ import org.apache.druid.sql.calcite.rule.SortCollapseRule; import org.apache.druid.sql.calcite.rule.logical.DruidAggregateRemoveRedundancyRule; import org.apache.druid.sql.calcite.rule.logical.DruidLogicalRules; import org.apache.druid.sql.calcite.run.EngineFeature; +import org.apache.druid.sql.hook.DruidHook; import java.util.ArrayList; import java.util.Collections; @@ -395,7 +395,8 @@ public class CalciteRulesManager public RelNode run(RelOptPlanner planner, RelNode rel, RelTraitSet requiredOutputTraits, List materializations, List lattices) { - Hook.TRIMMED.run(rel); + PlannerContext pctx = planner.getContext().unwrapOrThrow(PlannerContext.class); + pctx.dispatchHook(DruidHook.LOGICAL_PLAN, rel); return rel; } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java index b7e2de3e66b..21ad4c23663 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java @@ -55,6 +55,7 @@ import org.apache.druid.sql.calcite.rule.ReverseLookupRule; import org.apache.druid.sql.calcite.run.EngineFeature; import org.apache.druid.sql.calcite.run.QueryMaker; import 
org.apache.druid.sql.calcite.run.SqlEngine;
+import org.apache.druid.sql.hook.DruidHook.HookKey;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Interval;
@@ -668,4 +669,9 @@ public class PlannerContext
 
     return lookupCache.getLookup(lookupName);
   }
+
+  public <T> void dispatchHook(HookKey<T> key, T object)
+  {
+    plannerToolbox.getHookDispatcher().dispatch(key, object);
+  }
 }
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java
index a5cdc028e7f..514952972d3 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerFactory.java
@@ -49,6 +49,8 @@ import org.apache.druid.sql.calcite.planner.convertlet.DruidConvertletTable;
 import org.apache.druid.sql.calcite.run.SqlEngine;
 import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
 import org.apache.druid.sql.calcite.schema.DruidSchemaName;
+import org.apache.druid.sql.hook.DruidHook;
+import org.apache.druid.sql.hook.DruidHookDispatcher;
 
 import java.util.Map;
 import java.util.Properties;
@@ -77,7 +79,8 @@ public class PlannerFactory extends PlannerToolbox
       final CalciteRulesManager calciteRuleManager,
       final JoinableFactoryWrapper joinableFactoryWrapper,
       final CatalogResolver catalog,
-      final AuthConfig authConfig
+      final AuthConfig authConfig,
+      final DruidHookDispatcher hookDispatcher
   )
   {
     super(
@@ -91,7 +94,8 @@ public class PlannerFactory extends PlannerToolbox
         druidSchemaName,
         calciteRuleManager,
         authorizerMapper,
-        authConfig
+        authConfig,
+        hookDispatcher
     );
   }
 
@@ -112,6 +116,7 @@ public class PlannerFactory extends PlannerToolbox
         queryContext,
         hook
     );
+    context.dispatchHook(DruidHook.SQL, sql);
     return new DruidPlanner(buildFrameworkConfig(context), context, engine, hook);
   }
 
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerHook.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerHook.java
index fd245b096d8..8158a314f51 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerHook.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerHook.java
@@ -26,6 +26,7 @@ import org.apache.calcite.sql.SqlInsert;
 import org.apache.calcite.sql.SqlNode;
 import org.apache.druid.guice.annotations.UnstableApi;
 import org.apache.druid.sql.calcite.rel.DruidRel;
+import org.apache.druid.sql.hook.DruidHookDispatcher;
 
 /**
  * Druid-specific version of Calcite's {@link org.apache.calcite.runtime.Hook Hook}
@@ -33,8 +34,11 @@ import org.apache.druid.sql.calcite.rel.DruidRel;
  * for test validation. Calcite's hook has multiple low-level events, but, sadly,
  * none at the points where tests want to verify, except for the opportunity to
  * capture the native query.
+ *
+ * Should be removed; use {@link DruidHookDispatcher} instead.
*/ @UnstableApi +@Deprecated public interface PlannerHook { void captureSql(String sql); diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerToolbox.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerToolbox.java index f0009826118..d8e3a25f458 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerToolbox.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerToolbox.java @@ -26,6 +26,7 @@ import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; +import org.apache.druid.sql.hook.DruidHookDispatcher; public class PlannerToolbox { @@ -40,6 +41,7 @@ public class PlannerToolbox protected final CalciteRulesManager calciteRuleManager; protected final AuthorizerMapper authorizerMapper; protected final AuthConfig authConfig; + protected final DruidHookDispatcher hookDispatcher; public PlannerToolbox( final DruidOperatorTable operatorTable, @@ -52,7 +54,8 @@ public class PlannerToolbox final String druidSchemaName, final CalciteRulesManager calciteRuleManager, final AuthorizerMapper authorizerMapper, - final AuthConfig authConfig + final AuthConfig authConfig, + final DruidHookDispatcher hookDispatcher ) { this.operatorTable = operatorTable; @@ -66,6 +69,7 @@ public class PlannerToolbox this.calciteRuleManager = calciteRuleManager; this.authorizerMapper = authorizerMapper; this.authConfig = authConfig; + this.hookDispatcher = hookDispatcher; } public DruidOperatorTable operatorTable() @@ -117,4 +121,9 @@ public class PlannerToolbox { return authConfig; } + + public DruidHookDispatcher getHookDispatcher() + { + return hookDispatcher; + } } diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java index 9f15d382286..b543be135cc 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java @@ -47,7 +47,6 @@ import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexNode; -import org.apache.calcite.runtime.Hook; import org.apache.calcite.schema.ScannableTable; import org.apache.calcite.sql.SqlExplain; import org.apache.calcite.sql.SqlNode; @@ -76,6 +75,7 @@ import org.apache.druid.sql.calcite.rel.logical.DruidLogicalNode; import org.apache.druid.sql.calcite.run.EngineFeature; import org.apache.druid.sql.calcite.run.QueryMaker; import org.apache.druid.sql.calcite.table.DruidTable; +import org.apache.druid.sql.hook.DruidHook; import org.apache.druid.utils.Throwables; import javax.annotation.Nullable; @@ -155,7 +155,7 @@ public abstract class QueryHandler extends SqlStatementHandler.BaseStatementHand isPrepared = true; SqlNode validatedQueryNode = validatedQueryNode(); rootQueryRel = handlerContext.planner().rel(validatedQueryNode); - Hook.CONVERTED.run(rootQueryRel.rel); + handlerContext.plannerContext().dispatchHook(DruidHook.CONVERTED_PLAN, rootQueryRel.rel); handlerContext.hook().captureQueryRel(rootQueryRel); final RelDataTypeFactory typeFactory = rootQueryRel.rel.getCluster().getTypeFactory(); final SqlValidator validator = handlerContext.planner().getValidator(); @@ -563,7 +563,8 @@ public abstract class QueryHandler extends 
SqlStatementHandler.BaseStatementHand
             .plus(DruidLogicalConvention.instance()),
         newRoot
     );
-    Hook.JAVA_PLAN.run(newRoot);
+
+    plannerContext.dispatchHook(DruidHook.DRUID_PLAN, newRoot);
 
     DruidQueryGenerator generator = new DruidQueryGenerator(plannerContext, (DruidLogicalNode) newRoot, rexBuilder);
     DruidQuery baseQuery = generator.buildQuery();
@@ -591,7 +592,7 @@ public abstract class QueryHandler extends SqlStatementHandler.BaseStatementHand
 
     handlerContext.hook().captureDruidRel(druidRel);
 
-    Hook.JAVA_PLAN.run(druidRel);
+    plannerContext.dispatchHook(DruidHook.DRUID_PLAN, druidRel);
 
     if (explain != null) {
       return planExplanation(possiblyLimitedRoot, druidRel, true);
diff --git a/sql/src/main/java/org/apache/druid/sql/hook/DruidHook.java b/sql/src/main/java/org/apache/druid/sql/hook/DruidHook.java
new file mode 100644
index 00000000000..5969d608f2f
--- /dev/null
+++ b/sql/src/main/java/org/apache/druid/sql/hook/DruidHook.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.hook;
+
+import com.google.errorprone.annotations.Immutable;
+import org.apache.calcite.rel.RelNode;
+import java.util.Objects;
+
+/**
+ * Interface for hooks that can be invoked by {@link DruidHookDispatcher}.
+ *
+ * A new {@link HookKey} should be added wherever a new hook is needed.
+ */
+@FunctionalInterface
+public interface DruidHook<T>
+{
+  HookKey<RelNode> CONVERTED_PLAN = new HookKey<>("converted", RelNode.class);
+  HookKey<RelNode> LOGICAL_PLAN = new HookKey<>("logicalPlan", RelNode.class);
+  HookKey<RelNode> DRUID_PLAN = new HookKey<>("druidPlan", RelNode.class);
+  HookKey<String> SQL = new HookKey<>("sql", String.class);
+
+  @Immutable
+  class HookKey<T>
+  {
+    private final String label;
+    private final Class<T> type;
+
+    public HookKey(String label, Class<T> type)
+    {
+      this.label = label;
+      this.type = type;
+    }
+
+    @Override
+    public int hashCode()
+    {
+      return Objects.hash(label, type);
+    }
+
+    @Override
+    public boolean equals(Object obj)
+    {
+      if (this == obj) {
+        return true;
+      }
+      if (obj == null) {
+        return false;
+      }
+      if (getClass() != obj.getClass()) {
+        return false;
+      }
+      HookKey<?> other = (HookKey<?>) obj;
+      return Objects.equals(label, other.label) && Objects.equals(type, other.type);
+    }
+  }
+
+  void invoke(HookKey<T> key, T object);
+}
diff --git a/sql/src/main/java/org/apache/druid/sql/hook/DruidHookDispatcher.java b/sql/src/main/java/org/apache/druid/sql/hook/DruidHookDispatcher.java
new file mode 100644
index 00000000000..0114699d638
--- /dev/null
+++ b/sql/src/main/java/org/apache/druid/sql/hook/DruidHookDispatcher.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.hook;
+
+import com.google.inject.Inject;
+import org.apache.druid.guice.LazySingleton;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+import org.apache.druid.sql.hook.DruidHook.HookKey;
+
+import java.io.Closeable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Dispatcher for Druid hooks.
+ *
+ * A single instance should live in the system and be used to dispatch hooks.
+ * The usual way to dispatch is via
+ * {@link PlannerContext#dispatchHook(HookKey, Object)}. Access to this class is
+ * also possible through DruidConnectionExtras.
+ */
+@LazySingleton
+public class DruidHookDispatcher
+{
+  Map<HookKey<?>, List<DruidHook<?>>> hooks = new HashMap<>();
+
+  @Inject
+  public DruidHookDispatcher()
+  {
+  }
+
+  public <T> void register(HookKey<T> label, DruidHook<T> hook)
+  {
+    hooks.computeIfAbsent(label, k -> new ArrayList<>()).add(hook);
+  }
+
+  public <T> void unregister(HookKey<T> key, DruidHook<T> hook)
+  {
+    hooks.get(key).remove(hook);
+  }
+
+  public <T> Closeable withHook(HookKey<T> key, DruidHook<T> hook)
+  {
+    register(key, hook);
+    return new Closeable()
+    {
+      @Override
+      public void close()
+      {
+        unregister(key, hook);
+      }
+    };
+  }
+
+  @SuppressWarnings({"rawtypes", "unchecked"})
+  public <T> void dispatch(HookKey<T> key, T object)
+  {
+    List<DruidHook<?>> currentHooks = hooks.get(key);
+    if (currentHooks != null) {
+      for (DruidHook hook : currentHooks) {
+        hook.invoke(key, object);
+      }
+    }
+  }
+
+}
diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaDriverTest.java b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaDriverTest.java
index efdf782d147..3f039814c0f 100644
--- a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaDriverTest.java
+++ b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaDriverTest.java
@@ -19,6 +19,7 @@
 
 package org.apache.druid.quidem;
 
+import org.apache.druid.sql.calcite.SqlTestFrameworkConfig;
 import org.junit.jupiter.api.Test;
 
 import java.sql.Connection;
@@ -51,6 +52,6 @@ public class DruidAvaticaDriverTest
   @Test
   public void testURIParse() throws SQLException
   {
-    DruidAvaticaTestDriver.buildConfigfromURIParams("druidtest:///");
+    SqlTestFrameworkConfig.fromURL("druidtest:///");
   }
 }
diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java
index 123019dad04..d2a7b461dfc 100644
--- a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java
+++ b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java
@@ -70,25 +70,18 @@ import org.apache.druid.sql.calcite.util.SqlTestFramework.Builder;
 import org.apache.druid.sql.calcite.util.SqlTestFramework.PlannerComponentSupplier;
 import org.apache.druid.sql.calcite.util.SqlTestFramework.QueryComponentSupplier;
 import
org.apache.druid.sql.guice.SqlModule; -import org.apache.http.NameValuePair; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.http.client.utils.URIBuilder; -import org.apache.http.client.utils.URLEncodedUtils; import org.eclipse.jetty.server.Server; import java.io.Closeable; import java.io.File; import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.charset.StandardCharsets; import java.sql.Connection; import java.sql.Driver; import java.sql.DriverManager; import java.sql.DriverPropertyInfo; import java.sql.SQLException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; import java.util.Properties; import java.util.logging.Logger; @@ -98,10 +91,13 @@ public class DruidAvaticaTestDriver implements Driver new DruidAvaticaTestDriver().register(); } - public static final String URI_PREFIX = "druidtest://"; + public static final String SCHEME = "druidtest"; + public static final String URI_PREFIX = SCHEME + "://"; public static final String DEFAULT_URI = URI_PREFIX + "/"; - static final SqlTestFrameworkConfigStore CONFIG_STORE = new SqlTestFrameworkConfigStore(); + static final SqlTestFrameworkConfigStore CONFIG_STORE = new SqlTestFrameworkConfigStore( + x -> new AvaticaBasedTestConnectionSupplier(x) + ); public DruidAvaticaTestDriver() { @@ -114,13 +110,8 @@ public class DruidAvaticaTestDriver implements Driver return null; } try { - SqlTestFrameworkConfig config = buildConfigfromURIParams(url); - - ConfigurationInstance ci = CONFIG_STORE.getConfigurationInstance( - config, - x -> new AvaticaBasedTestConnectionSupplier(x) - ); - + SqlTestFrameworkConfig config = SqlTestFrameworkConfig.fromURL(url); + ConfigurationInstance ci = CONFIG_STORE.getConfigurationInstance(config); AvaticaJettyServer server = ci.framework.injector().getInstance(AvaticaJettyServer.class); return server.getConnection(info); } @@ -148,9 +139,9 @@ public class DruidAvaticaTestDriver implements Driver @Provides @LazySingleton - public DruidConnectionExtras getConnectionExtras(ObjectMapper objectMapper) + public DruidConnectionExtras getConnectionExtras(ObjectMapper objectMapper, DruidHookDispatcher druidHookDispatcher) { - return new DruidConnectionExtras.DruidConnectionExtrasImpl(objectMapper); + return new DruidConnectionExtras.DruidConnectionExtrasImpl(objectMapper, druidHookDispatcher); } @Provides @@ -349,24 +340,6 @@ public class DruidAvaticaTestDriver implements Driver return tempDir; } - public static SqlTestFrameworkConfig buildConfigfromURIParams(String url) throws SQLException - { - Map queryParams; - queryParams = new HashMap<>(); - try { - List params = URLEncodedUtils.parse(new URI(url), StandardCharsets.UTF_8); - for (NameValuePair pair : params) { - queryParams.put(pair.getName(), pair.getValue()); - } - // possible caveat: duplicate entries overwrite earlier ones - } - catch (URISyntaxException e) { - throw new SQLException("Can't decode URI", e); - } - - return new SqlTestFrameworkConfig(queryParams); - } - private void register() { try { diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidConnectionExtras.java b/sql/src/test/java/org/apache/druid/quidem/DruidConnectionExtras.java index 75bdd4280fa..176411a6bb7 100644 --- a/sql/src/test/java/org/apache/druid/quidem/DruidConnectionExtras.java +++ b/sql/src/test/java/org/apache/druid/quidem/DruidConnectionExtras.java @@ -20,18 +20,25 @@ package org.apache.druid.quidem; import com.fasterxml.jackson.databind.ObjectMapper; +import 
org.apache.druid.sql.hook.DruidHookDispatcher; + +import java.sql.Connection; public interface DruidConnectionExtras { ObjectMapper getObjectMapper(); + DruidHookDispatcher getDruidHookDispatcher(); + class DruidConnectionExtrasImpl implements DruidConnectionExtras { private final ObjectMapper objectMapper; + private final DruidHookDispatcher druidHookDispatcher; - public DruidConnectionExtrasImpl(ObjectMapper objectMapper) + public DruidConnectionExtrasImpl(ObjectMapper objectMapper, DruidHookDispatcher druidHookDispatcher) { this.objectMapper = objectMapper; + this.druidHookDispatcher = druidHookDispatcher; } @Override @@ -39,5 +46,19 @@ public interface DruidConnectionExtras { return objectMapper; } + + @Override + public DruidHookDispatcher getDruidHookDispatcher() + { + return druidHookDispatcher; + } + } + + static DruidConnectionExtras unwrapOrThrow(Connection connection) + { + if (connection instanceof DruidConnectionExtras) { + return (DruidConnectionExtras) connection; + } + throw new UnsupportedOperationException("Expected DruidConnectionExtras to be implemented by connection!"); } } diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidQuidemCommandHandler.java b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemCommandHandler.java index 8c2b99d80b6..7ae8f97650c 100644 --- a/sql/src/test/java/org/apache/druid/quidem/DruidQuidemCommandHandler.java +++ b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemCommandHandler.java @@ -35,12 +35,16 @@ import org.apache.druid.query.Query; import org.apache.druid.sql.calcite.BaseCalciteQueryTest; import org.apache.druid.sql.calcite.rel.DruidRel; import org.apache.druid.sql.calcite.util.QueryLogHook; +import org.apache.druid.sql.hook.DruidHook; +import org.apache.druid.sql.hook.DruidHook.HookKey; +import org.apache.druid.sql.hook.DruidHookDispatcher; +import java.io.Closeable; +import java.io.IOException; import java.sql.ResultSet; import java.sql.Statement; import java.util.ArrayList; import java.util.List; -import java.util.function.Consumer; import java.util.stream.Collectors; public class DruidQuidemCommandHandler implements CommandHandler @@ -56,7 +60,7 @@ public class DruidQuidemCommandHandler implements CommandHandler return new LogicalPlanCommand(lines, content); } if (line.startsWith("druidPlan")) { - return new PhysicalPlanCommand(lines, content); + return new DruidPlanCommand(lines, content); } if (line.startsWith("nativePlan")) { return new NativePlanCommand(lines, content); @@ -155,19 +159,22 @@ public class DruidQuidemCommandHandler implements CommandHandler */ abstract static class AbstractRelPlanCommand extends AbstractPlanCommand { - Hook hook; + HookKey hook; - AbstractRelPlanCommand(List lines, List content, Hook hook) + AbstractRelPlanCommand(List lines, List content, DruidHook.HookKey hook) { super(lines, content); this.hook = hook; } @Override - protected final void executeExplain(Context x) + protected final void executeExplain(Context x) throws IOException { + DruidHookDispatcher dhp = unwrapDruidHookDispatcher(x); List logged = new ArrayList<>(); - try (final Hook.Closeable unhook = hook.add((Consumer) logged::add)) { + try (Closeable unhook = dhp.withHook(hook, (key, relNode) -> { + logged.add(relNode); + })) { executeQuery(x); } @@ -179,21 +186,26 @@ public class DruidQuidemCommandHandler implements CommandHandler x.echo(ImmutableList.of(str)); } } + + protected final DruidHookDispatcher unwrapDruidHookDispatcher(Context x) + { + return 
DruidConnectionExtras.unwrapOrThrow(x.connection()).getDruidHookDispatcher(); + } } static class LogicalPlanCommand extends AbstractRelPlanCommand { LogicalPlanCommand(List lines, List content) { - super(lines, content, Hook.TRIMMED); + super(lines, content, DruidHook.LOGICAL_PLAN); } } - static class PhysicalPlanCommand extends AbstractRelPlanCommand + static class DruidPlanCommand extends AbstractRelPlanCommand { - PhysicalPlanCommand(List lines, List content) + DruidPlanCommand(List lines, List content) { - super(lines, content, Hook.JAVA_PLAN); + super(lines, content, DruidHook.DRUID_PLAN); } } @@ -201,7 +213,7 @@ public class DruidQuidemCommandHandler implements CommandHandler { ConvertedPlanCommand(List lines, List content) { - super(lines, content, Hook.CONVERTED); + super(lines, content, DruidHook.CONVERTED_PLAN); } } } diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java index 6058cb2a8bd..650af0880f9 100644 --- a/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java +++ b/sql/src/test/java/org/apache/druid/quidem/DruidQuidemTestBase.java @@ -29,12 +29,14 @@ import org.apache.calcite.util.Closer; import org.apache.calcite.util.Util; import org.apache.commons.io.filefilter.TrueFileFilter; import org.apache.commons.io.filefilter.WildcardFileFilter; +import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.RE; import org.apache.druid.java.util.common.StringUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; @@ -76,8 +78,12 @@ import static org.junit.jupiter.api.Assertions.fail; * */ @TestInstance(TestInstance.Lifecycle.PER_CLASS) +@ExtendWith(EnabledOnlyInSqlCompatibleMode.class) public abstract class DruidQuidemTestBase { + static { + NullHandling.initializeForTests(); + } public static final String IQ_SUFFIX = ".iq"; /** diff --git a/sql/src/test/java/org/apache/druid/quidem/EnabledOnlyInSqlCompatibleMode.java b/sql/src/test/java/org/apache/druid/quidem/EnabledOnlyInSqlCompatibleMode.java new file mode 100644 index 00000000000..96cf0a72acf --- /dev/null +++ b/sql/src/test/java/org/apache/druid/quidem/EnabledOnlyInSqlCompatibleMode.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.druid.quidem; + +import org.apache.druid.common.config.NullHandling; +import org.junit.jupiter.api.extension.ConditionEvaluationResult; +import org.junit.jupiter.api.extension.ExecutionCondition; +import org.junit.jupiter.api.extension.ExtensionContext; + +public class EnabledOnlyInSqlCompatibleMode implements ExecutionCondition +{ + static { + NullHandling.initializeForTests(); + } + + @Override + public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext context) + { + if (NullHandling.sqlCompatible()) { + return ConditionEvaluationResult.enabled("SQL compatible mode is enabled"); + } else { + return ConditionEvaluationResult.disabled("SQL compatible mode is disabled"); + } + } + +} diff --git a/sql/src/test/java/org/apache/druid/quidem/SqlQuidemTest.java b/sql/src/test/java/org/apache/druid/quidem/SqlQuidemTest.java index d1922472d7f..9e6a965891c 100644 --- a/sql/src/test/java/org/apache/druid/quidem/SqlQuidemTest.java +++ b/sql/src/test/java/org/apache/druid/quidem/SqlQuidemTest.java @@ -19,20 +19,10 @@ package org.apache.druid.quidem; -import org.apache.druid.common.config.NullHandling; -import org.junit.jupiter.api.condition.EnabledIf; - import java.io.File; -@EnabledIf(value = "enabled", disabledReason = "These tests are only run in SqlCompatible mode!") public class SqlQuidemTest extends DruidQuidemTestBase { - public static boolean enabled() - { - NullHandling.initializeForTests(); - return NullHandling.sqlCompatible(); - } - public SqlQuidemTest() { super(); diff --git a/sql/src/test/java/org/apache/druid/sql/SqlStatementTest.java b/sql/src/test/java/org/apache/druid/sql/SqlStatementTest.java index b332b6dabfa..1a7b6e3a7c5 100644 --- a/sql/src/test/java/org/apache/druid/sql/SqlStatementTest.java +++ b/sql/src/test/java/org/apache/druid/sql/SqlStatementTest.java @@ -58,6 +58,7 @@ import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.planner.PrepareResult; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.sql.http.SqlQuery; import org.easymock.EasyMock; import org.hamcrest.MatcherAssert; @@ -158,7 +159,8 @@ public class SqlStatementTest new CalciteRulesManager(ImmutableSet.of()), joinableFactoryWrapper, CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); this.sqlStatementFactory = new SqlStatementFactory( diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index cccaab75b6f..518e225c491 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -89,6 +89,7 @@ import org.apache.druid.sql.calcite.schema.NamedSchema; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; import org.apache.druid.sql.guice.SqlModule; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.eclipse.jetty.server.Server; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -1048,7 +1049,8 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase new CalciteRulesManager(ImmutableSet.of()), CalciteTests.createJoinableFactoryWrapper(), CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new 
DruidHookDispatcher() ) ); } diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java index 81cd1c69231..ed855f9c5f8 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java @@ -47,6 +47,7 @@ import org.apache.druid.sql.calcite.planner.PlannerFactory; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.junit.Assert; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ -112,7 +113,8 @@ public class DruidStatementTest extends CalciteTestBase new CalciteRulesManager(ImmutableSet.of()), joinableFactoryWrapper, CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); this.sqlStatementFactory = CalciteTests.createSqlStatementFactory( CalciteTests.createMockSqlEngine(walker, conglomerate), diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java index 9eda5849fcf..1fc39d52ec5 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfig.java @@ -30,15 +30,23 @@ import com.google.common.collect.Sets; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.query.topn.TopNQueryConfig; +import org.apache.druid.quidem.DruidAvaticaTestDriver; import org.apache.druid.sql.calcite.util.CacheTestHelperModule.ResultCacheMode; import org.apache.druid.sql.calcite.util.SqlTestFramework; import org.apache.druid.sql.calcite.util.SqlTestFramework.QueryComponentSupplier; import org.apache.druid.sql.calcite.util.SqlTestFramework.StandardComponentSupplier; +import org.apache.http.NameValuePair; import org.apache.http.client.utils.URIBuilder; +import org.apache.http.client.utils.URLEncodedUtils; import org.junit.jupiter.api.extension.AfterAllCallback; import org.junit.jupiter.api.extension.BeforeEachCallback; import org.junit.jupiter.api.extension.ExtensionContext; +import org.reflections.Configuration; import org.reflections.Reflections; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.util.ClasspathHelper; +import org.reflections.util.ConfigurationBuilder; +import org.reflections.util.FilterBuilder; import javax.annotation.Nonnull; import java.io.Closeable; @@ -51,6 +59,8 @@ import java.lang.reflect.Constructor; import java.lang.reflect.Method; import java.net.URI; import java.net.URISyntaxException; +import java.nio.charset.StandardCharsets; +import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -224,11 +234,18 @@ public class SqlTestFrameworkConfig public static class SqlTestFrameworkConfigStore implements Closeable { + private final Function queryComponentSupplierWrapper; + + public SqlTestFrameworkConfigStore( + Function queryComponentSupplierWrapper) + { + this.queryComponentSupplierWrapper = queryComponentSupplierWrapper; + } + Map configMap = new HashMap<>(); public ConfigurationInstance getConfigurationInstance( - SqlTestFrameworkConfig config, - Function queryComponentSupplierWrapper) 
throws Exception + SqlTestFrameworkConfig config) throws Exception { ConfigurationInstance ret = configMap.get(config); if (!configMap.containsKey(config)) { @@ -267,7 +284,7 @@ public class SqlTestFrameworkConfig */ public static class Rule implements AfterAllCallback, BeforeEachCallback { - SqlTestFrameworkConfigStore configStore = new SqlTestFrameworkConfigStore(); + SqlTestFrameworkConfigStore configStore = new SqlTestFrameworkConfigStore(Function.identity()); private SqlTestFrameworkConfig config; private Method method; private String testName; @@ -318,7 +335,7 @@ public class SqlTestFrameworkConfig public SqlTestFramework get() throws Exception { - return configStore.getConfigurationInstance(config, Function.identity()).framework; + return configStore.getConfigurationInstance(config).framework; } public T getAnnotation(Class annotationType) @@ -344,6 +361,7 @@ public class SqlTestFrameworkConfig .minTopNThreshold(config.minTopNThreshold) .mergeBufferCount(config.numMergeBuffers) .withOverrideModule(config.resultCache.makeModule()); + framework = builder.build(); } @@ -393,12 +411,43 @@ public class SqlTestFrameworkConfig if (def.minTopNThreshold != minTopNThreshold) { map.put("minTopNThreshold", String.valueOf(minTopNThreshold)); } + if (def.componentSupplier != componentSupplier) { + map.put("componentSupplier", componentSupplier.getSimpleName()); + } if (!equals(new SqlTestFrameworkConfig(map))) { throw new IAE("Can't reproduce config via map!"); } return map; } + public static SqlTestFrameworkConfig fromURL(String url) throws SQLException + { + + Map queryParams; + queryParams = new HashMap<>(); + try { + URI uri = new URI(url); + if (!DruidAvaticaTestDriver.SCHEME.equals(uri.getScheme())) { + throw new SQLException( + StringUtils.format("URI [%s] is invalid ; only scheme [%s] is supported.", url, DruidAvaticaTestDriver.SCHEME) + ); + } + if (uri.getHost() != null || uri.getPort() != -1) { + throw new SQLException(StringUtils.format("URI [%s] is invalid ; only query parameters are supported.", url)); + } + List params = URLEncodedUtils.parse(uri, StandardCharsets.UTF_8); + for (NameValuePair pair : params) { + queryParams.put(pair.getName(), pair.getValue()); + } + // possible caveat: duplicate entries overwrite earlier ones + } + catch (URISyntaxException e) { + throw new SQLException("Can't decode URI", e); + } + + return new SqlTestFrameworkConfig(queryParams); + } + abstract static class ConfigOptionProcessor { final Class annotationClass; @@ -459,7 +508,15 @@ public class SqlTestFrameworkConfig @Override public Set> load(String pkg) { - return new Reflections(pkg).getSubTypesOf(QueryComponentSupplier.class); + Configuration cfg = new ConfigurationBuilder() + .setScanners(new SubTypesScanner(true)) + .setUrls(ClasspathHelper.forJavaClassPath()) + .filterInputsBy( + new FilterBuilder() + .includePackage(pkg) + .and(s -> s.contains("ComponentSupplier")) + ); + return new Reflections(cfg).getSubTypesOf(QueryComponentSupplier.class); } }); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java index 3a7dd2d22d3..844251e8ac3 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/SqlTestFrameworkConfigTest.java @@ -22,6 +22,7 @@ package org.apache.druid.sql.calcite; import com.google.common.collect.ImmutableMap; import nl.jqno.equalsverifier.EqualsVerifier; import 
org.apache.druid.java.util.common.IAE; +import org.apache.druid.sql.calcite.DrillWindowQueryTest.DrillComponentSupplier; import org.apache.druid.sql.calcite.SqlTestFrameworkConfig.MinTopNThreshold; import org.apache.druid.sql.calcite.SqlTestFrameworkConfig.NumMergeBuffers; import org.apache.druid.sql.calcite.SqlTestFrameworkConfig.ResultCache; @@ -29,6 +30,7 @@ import org.apache.druid.sql.calcite.util.CacheTestHelperModule.ResultCacheMode; import org.junit.jupiter.api.Test; import java.lang.annotation.Annotation; +import java.net.URI; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -139,4 +141,15 @@ public class SqlTestFrameworkConfigTest ); } + @Test + public void testURI() + { + SqlTestFrameworkConfig c = new SqlTestFrameworkConfig( + ImmutableMap.of( + "componentSupplier", DrillComponentSupplier.class.getSimpleName() + ) + ); + URI uri = c.getDruidTestURI(); + assertEquals("druidtest:///?componentSupplier=DrillComponentSupplier", uri.toString()); + } } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/SqlVectorizedExpressionSanityTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/SqlVectorizedExpressionSanityTest.java index 304400bc3d8..07f57a5deeb 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/SqlVectorizedExpressionSanityTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/SqlVectorizedExpressionSanityTest.java @@ -49,6 +49,7 @@ import org.apache.druid.sql.calcite.planner.PlannerResult; import org.apache.druid.sql.calcite.run.SqlEngine; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.testing.InitializedNullHandlingTest; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.LinearShardSpec; @@ -152,7 +153,8 @@ public class SqlVectorizedExpressionSanityTest extends InitializedNullHandlingTe new CalciteRulesManager(ImmutableSet.of()), joinableFactoryWrapper, CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java index 6155678c845..f4ada1f1c17 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/expression/ExpressionTestHelper.java @@ -62,6 +62,7 @@ import org.apache.druid.sql.calcite.schema.ViewSchema; import org.apache.druid.sql.calcite.table.RowSignatures; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -97,7 +98,8 @@ public class ExpressionTestHelper "druid", new CalciteRulesManager(ImmutableSet.of()), CalciteTests.TEST_AUTHORIZER_MAPPER, - AuthConfig.newBuilder().build() + AuthConfig.newBuilder().build(), + new DruidHookDispatcher() ); public static final PlannerContext PLANNER_CONTEXT = PlannerContext.create( PLANNER_TOOLBOX, @@ -336,7 +338,7 @@ public class ExpressionTestHelper } ExprEval result = PLANNER_CONTEXT.parseExpression(expression.getExpression()) - + .eval(expressionBindings); Assert.assertEquals("Result for: " + rexNode, expectedResult, result.value()); diff --git 
a/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java index 64a626f682c..fe26ba9db8c 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/external/ExternalTableScanRuleTest.java @@ -39,6 +39,7 @@ import org.apache.druid.sql.calcite.schema.NamedDruidSchema; import org.apache.druid.sql.calcite.schema.NamedViewSchema; import org.apache.druid.sql.calcite.schema.ViewSchema; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.easymock.EasyMock; import org.junit.Assert; import org.junit.Test; @@ -71,7 +72,8 @@ public class ExternalTableScanRuleTest "druid", new CalciteRulesManager(ImmutableSet.of()), CalciteTests.TEST_AUTHORIZER_MAPPER, - AuthConfig.newBuilder().build() + AuthConfig.newBuilder().build(), + new DruidHookDispatcher() ); final PlannerContext plannerContext = PlannerContext.create( toolbox, diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/planner/CalcitePlannerModuleTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/planner/CalcitePlannerModuleTest.java index 43d47733277..89df405b7f1 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/planner/CalcitePlannerModuleTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/planner/CalcitePlannerModuleTest.java @@ -50,6 +50,7 @@ import org.apache.druid.sql.calcite.schema.DruidSchemaName; import org.apache.druid.sql.calcite.schema.NamedSchema; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.easymock.EasyMock; import org.easymock.EasyMockExtension; import org.easymock.Mock; @@ -192,7 +193,8 @@ public class CalcitePlannerModuleTest extends CalciteTestBase "druid", new CalciteRulesManager(ImmutableSet.of()), CalciteTests.TEST_AUTHORIZER_MAPPER, - AuthConfig.newBuilder().build() + AuthConfig.newBuilder().build(), + new DruidHookDispatcher() ); PlannerContext context = PlannerContext.create( @@ -224,7 +226,8 @@ public class CalcitePlannerModuleTest extends CalciteTestBase "druid", new CalciteRulesManager(ImmutableSet.of()), CalciteTests.TEST_AUTHORIZER_MAPPER, - AuthConfig.newBuilder().build() + AuthConfig.newBuilder().build(), + new DruidHookDispatcher() ); PlannerContext contextWithBloat = PlannerContext.create( diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java index 205f3379d71..072bdd6b6e8 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/planner/DruidRexExecutorTest.java @@ -54,6 +54,7 @@ import org.apache.druid.sql.calcite.schema.ViewSchema; import org.apache.druid.sql.calcite.table.RowSignatures; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.testing.InitializedNullHandlingTest; import org.easymock.EasyMock; import org.junit.Assert; @@ -100,7 +101,8 @@ public class DruidRexExecutorTest extends InitializedNullHandlingTest "druid", new CalciteRulesManager(ImmutableSet.of()), CalciteTests.TEST_AUTHORIZER_MAPPER, - 
AuthConfig.newBuilder().build() + AuthConfig.newBuilder().build(), + new DruidHookDispatcher() ); private static final PlannerContext PLANNER_CONTEXT = PlannerContext.create( PLANNER_TOOLBOX, diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/run/DruidHookTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/run/DruidHookTest.java new file mode 100644 index 00000000000..62190fb0c69 --- /dev/null +++ b/sql/src/test/java/org/apache/druid/sql/calcite/run/DruidHookTest.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql.calcite.run; + +import nl.jqno.equalsverifier.EqualsVerifier; +import org.apache.druid.sql.hook.DruidHook; +import org.junit.Test; + +public class DruidHookTest +{ + @Test + public void testHookKeyEquals() + { + EqualsVerifier.forClass(DruidHook.HookKey.class) + .withNonnullFields("label", "type") + .usingGetClass() + .verify(); + } +} diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java index 3fdbafb71a8..3d0d0352e60 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java @@ -365,18 +365,30 @@ public class CalciteTests return QueryFrameworkUtils.createOperatorTable(INJECTOR); } + + public static DruidNode mockCoordinatorNode() + { + return new DruidNode("test-coordinator", "dummy", false, 8081, null, true, false); + } + + public static FakeDruidNodeDiscoveryProvider mockDruidNodeDiscoveryProvider(final DruidNode coordinatorNode) + { + FakeDruidNodeDiscoveryProvider provider = new FakeDruidNodeDiscoveryProvider( + ImmutableMap.of( + NodeRole.COORDINATOR, new FakeDruidNodeDiscovery(ImmutableMap.of(NodeRole.COORDINATOR, coordinatorNode)) + ) + ); + return provider; + } + public static SystemSchema createMockSystemSchema( final DruidSchema druidSchema, final SpecificSegmentsQuerySegmentWalker walker, final AuthorizerMapper authorizerMapper ) { - final DruidNode coordinatorNode = new DruidNode("test-coordinator", "dummy", false, 8081, null, true, false); - FakeDruidNodeDiscoveryProvider provider = new FakeDruidNodeDiscoveryProvider( - ImmutableMap.of( - NodeRole.COORDINATOR, new FakeDruidNodeDiscovery(ImmutableMap.of(NodeRole.COORDINATOR, coordinatorNode)) - ) - ); + final DruidNode coordinatorNode = mockCoordinatorNode(); + FakeDruidNodeDiscoveryProvider provider = mockDruidNodeDiscoveryProvider(coordinatorNode); final DruidNode overlordNode = new DruidNode("test-overlord", "dummy", false, 8090, null, true, false); diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java 
b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java index 5d710600a11..c43cb5555ad 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/SqlTestFramework.java @@ -39,6 +39,7 @@ import org.apache.druid.java.util.common.io.Closer; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.GlobalTableDataSource; import org.apache.druid.query.QueryRunnerFactoryConglomerate; +import org.apache.druid.query.QuerySegmentWalker; import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider; import org.apache.druid.query.topn.TopNQueryConfig; import org.apache.druid.segment.DefaultColumnFormatConfig; @@ -67,8 +68,11 @@ import org.apache.druid.sql.calcite.schema.NoopDruidSchemaManager; import org.apache.druid.sql.calcite.view.DruidViewMacroFactory; import org.apache.druid.sql.calcite.view.InProcessViewManager; import org.apache.druid.sql.calcite.view.ViewManager; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.apache.druid.timeline.DataSegment; +import javax.inject.Named; + import java.io.Closeable; import java.io.IOException; import java.net.URI; @@ -184,6 +188,11 @@ public class SqlTestFramework default void close() throws IOException { } + + default void configureGuice(CoreInjectorBuilder injectorBuilder, List overrideModules) + { + configureGuice(injectorBuilder); + } } public interface PlannerComponentSupplier @@ -494,7 +503,8 @@ public class SqlTestFramework new CalciteRulesManager(componentSupplier.extensionCalciteRules()), framework.injector.getInstance(JoinableFactoryWrapper.class), framework.builder.catalogResolver, - authConfig != null ? authConfig : new AuthConfig() + authConfig != null ? 
authConfig : new AuthConfig(), + new DruidHookDispatcher() ); componentSupplier.finalizePlanner(this); this.statementFactory = QueryFrameworkUtils.createSqlStatementFactory( @@ -569,7 +579,14 @@ public class SqlTestFramework @Provides @LazySingleton - public SpecificSegmentsQuerySegmentWalker segmentsQuerySegmentWalker(final Injector injector) + public QuerySegmentWalker querySegmentWalker(final Injector injector) + { + return injector.getInstance(SpecificSegmentsQuerySegmentWalker.class); + } + + @Provides + @LazySingleton + public SpecificSegmentsQuerySegmentWalker specificSegmentsQuerySegmentWalker(final Injector injector) { SpecificSegmentsQuerySegmentWalker walker = componentSupplier.createQuerySegmentWalker( injector.getInstance(QueryRunnerFactoryConglomerate.class), @@ -585,10 +602,54 @@ public class SqlTestFramework public QueryLifecycleFactory queryLifecycleFactory(final Injector injector) { return QueryFrameworkUtils.createMockQueryLifecycleFactory( - injector.getInstance(SpecificSegmentsQuerySegmentWalker.class), + injector.getInstance(QuerySegmentWalker.class), injector.getInstance(QueryRunnerFactoryConglomerate.class) ); } + + @Provides + @LazySingleton + ViewManager createViewManager() + { + return componentSupplier.getPlannerComponentSupplier().createViewManager(); + } + + @Provides + @LazySingleton + public DruidSchemaCatalog makeCatalog( + final Injector injector, + final PlannerConfig plannerConfig, + final AuthConfig authConfig, + final ViewManager viewManager, + QueryRunnerFactoryConglomerate conglomerate, + QuerySegmentWalker walker + ) + { + final DruidSchemaCatalog rootSchema = QueryFrameworkUtils.createMockRootSchema( + injector, + conglomerate, + (SpecificSegmentsQuerySegmentWalker) walker, + plannerConfig, + viewManager, + componentSupplier.getPlannerComponentSupplier().createSchemaManager(), + authorizerMapper, + builder.catalogResolver + ); + return rootSchema; + } + + @Provides + SqlTestFrameworkConfig getTestConfig() + { + return builder.config; + } + + @Provides + @Named("quidem") + public URI getDruidTestURI() + { + return getTestConfig().getDruidTestURI(); + } } public static final DruidViewMacroFactory DRUID_VIEW_MACRO_FACTORY = new TestDruidViewMacroFactory(); @@ -613,17 +674,18 @@ public class SqlTestFramework // Ignore load scopes. This is a unit test, not a Druid node. If a // test pulls in a module, then pull in that module, even though we are // not the Druid node to which the module is scoped. 
- .ignoreLoadScopes() - .addModule(binder -> binder.bind(Closer.class).toInstance(resourceCloser)) - .addModule(new LookylooModule()) - .addModule(new SegmentWranglerModule()) - .addModule(new SqlAggregationModule()) - .addModule(new ExpressionModule()) - .addModule(new TestSetupModule(builder)); - builder.componentSupplier.configureGuice(injectorBuilder); + .ignoreLoadScopes(); + List overrideModules = new ArrayList<>(builder.overrideModules); + overrideModules.add(new LookylooModule()); + overrideModules.add(new SqlAggregationModule()); + overrideModules.add(new SegmentWranglerModule()); + overrideModules.add(new ExpressionModule()); + + overrideModules.add(testSetupModule()); + builder.componentSupplier.configureGuice(injectorBuilder, overrideModules); ServiceInjectorBuilder serviceInjector = new ServiceInjectorBuilder(injectorBuilder); - serviceInjector.addAll(builder.overrideModules); + serviceInjector.addAll(overrideModules); this.injector = serviceInjector.build(); this.engine = builder.componentSupplier.createEngine(queryLifecycleFactory(), queryJsonMapper(), injector); @@ -631,6 +693,11 @@ public class SqlTestFramework componentSupplier.finalizeTestFramework(this); } + public TestSetupModule testSetupModule() + { + return new TestSetupModule(builder); + } + public Injector injector() { return injector; diff --git a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java index fa3c1edcea2..665d5873b46 100644 --- a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java +++ b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java @@ -102,6 +102,7 @@ import org.apache.druid.sql.calcite.run.NativeSqlEngine; import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog; import org.apache.druid.sql.calcite.util.CalciteTestBase; import org.apache.druid.sql.calcite.util.CalciteTests; +import org.apache.druid.sql.hook.DruidHookDispatcher; import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.jupiter.api.AfterAll; @@ -259,7 +260,8 @@ public class SqlResourceTest extends CalciteTestBase new CalciteRulesManager(ImmutableSet.of()), CalciteTests.createJoinableFactoryWrapper(), CatalogResolver.NULL_RESOLVER, - new AuthConfig() + new AuthConfig(), + new DruidHookDispatcher() ); lifecycleManager = new SqlLifecycleManager()
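
As an illustration of how the new hook API in this patch fits together (a sketch, not part of the diff): the planner calls PlannerContext#dispatchHook (see PlannerFactory and QueryHandler above), which forwards to the shared DruidHookDispatcher, and listeners subscribe per HookKey, for example DruidQuidemCommandHandler registering through DruidConnectionExtras#getDruidHookDispatcher. The standalone class below shows only that register/dispatch mechanic, assuming the DruidHook and DruidHookDispatcher classes introduced above; the HookUsageSketch class and its package name are hypothetical.

package org.apache.druid.example; // hypothetical package, for illustration only

import org.apache.druid.sql.hook.DruidHook;
import org.apache.druid.sql.hook.DruidHookDispatcher;

import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class HookUsageSketch
{
  public static void main(String[] args) throws IOException
  {
    final DruidHookDispatcher dispatcher = new DruidHookDispatcher();
    final List<String> capturedSql = new ArrayList<>();

    // withHook() registers the hook and returns a Closeable that unregisters it,
    // so the subscription only lives for the duration of the try block.
    try (Closeable unhook = dispatcher.withHook(DruidHook.SQL, (key, sql) -> capturedSql.add(sql))) {
      // In the planner this dispatch happens in PlannerFactory, right before the
      // DruidPlanner is constructed, via context.dispatchHook(DruidHook.SQL, sql);
      // here the dispatcher is called directly to show the mechanics.
      dispatcher.dispatch(DruidHook.SQL, "SELECT 1");
    }

    System.out.println(capturedSql); // prints [SELECT 1]
  }
}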