SQL test framework extensions (#13426)


* Capture planner artifacts: logical plan, etc.
* Planner test builder validates the logical plan
* Validation for the SQL result schema (we already have
  validation for the Druid row signature)
* Better Guice integration: properties, reuse Guice modules
* Avoid the need for hand-coded operator and expression macro tables
* Retire some of the test-specific query component creation
* Fix query log hook race condition
Paul Rogers 2022-12-02 09:11:59 -08:00 committed by GitHub
parent 30498c1f98
commit b76ff16d00
63 changed files with 1602 additions and 665 deletions
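
Taken together, the changes below replace per-test createOperatorTable(), createMacroTable(), and getJacksonModules() overrides with two framework hooks: gatherProperties(Properties) for property-driven configuration and configureGuice(DruidInjectorBuilder) for module registration. A minimal sketch of the resulting pattern, using a hypothetical FooModule (the real per-extension overrides appear in the files below):

import java.util.Properties;

import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;

public class FooSqlAggregatorTest extends BaseCalciteQueryTest
{
  @Override
  public void gatherProperties(Properties properties)
  {
    super.gatherProperties(properties);
    // Hypothetical property override; the sketch tests below use this hook
    // to choose the APPROX_COUNT_DISTINCT implementation.
    properties.put("druid.sql.example.property", "value");
  }

  @Override
  public void configureGuice(DruidInjectorBuilder builder)
  {
    super.configureGuice(builder);
    // The module contributes operator conversions, aggregators, and Jackson
    // modules; tests that still need the assembled tables read them back
    // via queryFramework().operatorTable() and queryFramework().macroTable().
    builder.addModule(new FooModule()); // hypothetical extension module
  }
}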

View File

@ -36,6 +36,7 @@ import org.skife.config.ConfigurationObjectFactory;
import javax.validation.Validator;
import java.util.Properties;
@LazySingleton
public class DruidSecondaryModule implements Module
{
private final Properties properties;

View File

@ -20,9 +20,13 @@
package org.apache.druid.java.util.common;
import com.google.common.base.Strings;
import org.apache.commons.io.IOUtils;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
@ -797,4 +801,18 @@ public class StringUtils
return s.substring(0, maxBytes);
}
}
public static String getResource(Object ref, String resource)
{
try {
InputStream is = ref.getClass().getResourceAsStream(resource);
if (is == null) {
throw new ISE("Resource not found: [%s]", resource);
}
return IOUtils.toString(is, StandardCharsets.UTF_8);
}
catch (IOException e) {
throw new ISE(e, "Cannot load resource: [%s]", resource);
}
}
}
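
A usage sketch for the new helper (the resource path here is hypothetical):

// Loads a classpath resource via ref.getClass().getResourceAsStream() and
// returns it as a UTF-8 string; throws ISE if the resource is missing or
// cannot be read.
String expectedPlan = StringUtils.getResource(this, "/calcite/expected/plan.txt");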

View File

@ -19,10 +19,12 @@
package org.apache.druid.utils;
import org.apache.druid.guice.LazySingleton;
import org.apache.druid.java.util.common.UOE;
import java.lang.reflect.InvocationTargetException;
@LazySingleton
public class RuntimeInfo
{
public int getAvailableProcessors()

View File

@ -19,23 +19,13 @@
package org.apache.druid.compressedbigdecimal;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
public class CompressedBigDecimalMaxSqlAggregatorTest extends CompressedBigDecimalSqlAggregatorTestBase
{
private static final String FUNCTION_NAME = CompressedBigDecimalMaxSqlAggregator.NAME;
@Override
public DruidOperatorTable createOperatorTable()
{
return new DruidOperatorTable(ImmutableSet.of(new CompressedBigDecimalMaxSqlAggregator()), ImmutableSet.of());
}
@Override
public void testCompressedBigDecimalAggWithNumberParse()
{
testCompressedBigDecimalAggWithNumberParseHelper(
FUNCTION_NAME,
new Object[]{"6.000000000", "6.000000000", "10.100000000"},

View File

@ -19,19 +19,10 @@
package org.apache.druid.compressedbigdecimal;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
public class CompressedBigDecimalMinSqlAggregatorTest extends CompressedBigDecimalSqlAggregatorTestBase
{
private static final String FUNCTION_NAME = CompressedBigDecimalMinSqlAggregator.NAME;
@Override
public DruidOperatorTable createOperatorTable()
{
return new DruidOperatorTable(ImmutableSet.of(new CompressedBigDecimalMinSqlAggregator()), ImmutableSet.of());
}
@Override
public void testCompressedBigDecimalAggWithNumberParse()
{

View File

@ -20,17 +20,17 @@
package org.apache.druid.compressedbigdecimal;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.inject.Injector;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.data.input.impl.MapInputRowParser;
import org.apache.druid.data.input.impl.TimeAndDimsParseSpec;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
@ -45,7 +45,6 @@ import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
@ -74,16 +73,17 @@ public abstract class CompressedBigDecimalSqlAggregatorTestBase extends BaseCalc
TestDataBuilder.RAW_ROWS1.stream().map(m -> TestDataBuilder.createRow(m, PARSER)).collect(Collectors.toList());
@Override
public Iterable<? extends Module> getJacksonModules()
public void configureGuice(DruidInjectorBuilder builder)
{
CompressedBigDecimalModule bigDecimalModule = new CompressedBigDecimalModule();
return Iterables.concat(super.getJacksonModules(), bigDecimalModule.getJacksonModules());
super.configureGuice(builder);
builder.addModule(new CompressedBigDecimalModule());
}
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
QueryableIndex index =
@ -121,9 +121,6 @@ public abstract class CompressedBigDecimalSqlAggregatorTestBase extends BaseCalc
objectMapper.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
}
@Override
public abstract DruidOperatorTable createOperatorTable();
@Test
public abstract void testCompressedBigDecimalAggWithNumberParse();
@ -279,5 +276,4 @@ public abstract class CompressedBigDecimalSqlAggregatorTestBase extends BaseCalc
ImmutableList.of(expectedResults)
);
}
}

View File

@ -19,19 +19,10 @@
package org.apache.druid.compressedbigdecimal;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
public class CompressedBigDecimalSumSqlAggregatorTest extends CompressedBigDecimalSqlAggregatorTestBase
{
private static final String FUNCTION_NAME = CompressedBigDecimalSumSqlAggregator.NAME;
@Override
public DruidOperatorTable createOperatorTable()
{
return new DruidOperatorTable(ImmutableSet.of(new CompressedBigDecimalSumSqlAggregator()), ImmutableSet.of());
}
@Override
public void testCompressedBigDecimalAggWithNumberParse()
{
@ -39,7 +30,6 @@ public class CompressedBigDecimalSumSqlAggregatorTest extends CompressedBigDecim
FUNCTION_NAME,
new Object[]{"21.000000000", "21.000000000", "13.100000000"},
CompressedBigDecimalSumAggregatorFactory::new
);
}

View File

@ -19,11 +19,10 @@
package org.apache.druid.query.aggregation.tdigestsketch.sql;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.inject.Injector;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.Druids;
@ -50,7 +49,6 @@ import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
@ -64,21 +62,18 @@ import java.util.List;
public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
{
private static final DruidOperatorTable OPERATOR_TABLE = new DruidOperatorTable(
ImmutableSet.of(new TDigestSketchQuantileSqlAggregator(), new TDigestGenerateSketchSqlAggregator()),
ImmutableSet.of()
);
@Override
public Iterable<? extends Module> getJacksonModules()
public void configureGuice(DruidInjectorBuilder builder)
{
return Iterables.concat(super.getJacksonModules(), new TDigestSketchModule().getJacksonModules());
super.configureGuice(builder);
builder.addModule(new TDigestSketchModule());
}
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
TDigestSketchModule.registerSerde();
@ -116,12 +111,6 @@ public class TDigestSketchSqlAggregatorTest extends BaseCalciteQueryTest
);
}
@Override
public DruidOperatorTable createOperatorTable()
{
return OPERATOR_TABLE;
}
@Test
public void testComputingSketchOnNumericValues()
{

View File

@ -44,7 +44,6 @@ import java.util.List;
*/
public class HllSketchModule implements DruidModule
{
public static final String TYPE_NAME = "HLLSketch"; // common type name to be associated with segment data
public static final String BUILD_TYPE_NAME = "HLLSketchBuild";
public static final String MERGE_TYPE_NAME = "HLLSketchMerge";
@ -53,7 +52,6 @@ public class HllSketchModule implements DruidModule
public static final String ESTIMATE_WITH_BOUNDS_TYPE_NAME = "HLLSketchEstimateWithBounds";
public static final String ESTIMATE_TYPE_NAME = "HLLSketchEstimate";
@Override
public void configure(final Binder binder)
{

View File

@ -43,7 +43,6 @@ import java.util.List;
public class DoublesSketchModule implements DruidModule
{
public static final String DOUBLES_SKETCH = "quantilesDoublesSketch";
public static final String DOUBLES_SKETCH_MERGE = "quantilesDoublesSketchMerge";
public static final ColumnType TYPE = ColumnType.ofComplex(DOUBLES_SKETCH);

View File

@ -19,12 +19,11 @@
package org.apache.druid.query.aggregation.datasketches.hll.sql;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.inject.Injector;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
@ -61,13 +60,11 @@ import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.aggregation.ApproxCountDistinctSqlAggregator;
import org.apache.druid.sql.calcite.aggregation.builtin.CountSqlAggregator;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.sql.guice.SqlModule;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.joda.time.DateTimeZone;
@ -78,22 +75,34 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
{
private static final boolean ROUND = true;
@Override
public Iterable<? extends Module> getJacksonModules()
public void gatherProperties(Properties properties)
{
return Iterables.concat(super.getJacksonModules(), new HllSketchModule().getJacksonModules());
super.gatherProperties(properties);
// Use APPROX_COUNT_DISTINCT_DS_HLL as APPROX_COUNT_DISTINCT impl for these tests.
properties.put(SqlModule.PROPERTY_SQL_APPROX_COUNT_DISTINCT_CHOICE, HllSketchApproxCountDistinctSqlAggregator.NAME);
}
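
For context: if the constant resolves as I recall, SqlModule.PROPERTY_SQL_APPROX_COUNT_DISTINCT_CHOICE is the key druid.sql.approxCountDistinct.function, so the equivalent server-side configuration would be:

druid.sql.approxCountDistinct.function=APPROX_COUNT_DISTINCT_DS_HLL

This property hook replaces the removed createOperatorTable() override (further down in this file) that hand-wired CountSqlAggregator around ApproxCountDistinctSqlAggregator.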
@Override
public void configureGuice(DruidInjectorBuilder builder)
{
super.configureGuice(builder);
builder.addModule(new HllSketchModule());
}
@SuppressWarnings("resource")
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
HllSketchModule.registerSerde();
@ -132,30 +141,6 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
);
}
@Override
public DruidOperatorTable createOperatorTable()
{
final HllSketchApproxCountDistinctSqlAggregator approxCountDistinctSqlAggregator =
new HllSketchApproxCountDistinctSqlAggregator();
return new DruidOperatorTable(
ImmutableSet.of(
approxCountDistinctSqlAggregator,
new HllSketchObjectSqlAggregator(),
// Use APPROX_COUNT_DISTINCT_DS_HLL as APPROX_COUNT_DISTINCT impl for these tests.
new CountSqlAggregator(new ApproxCountDistinctSqlAggregator(approxCountDistinctSqlAggregator)),
new ApproxCountDistinctSqlAggregator(approxCountDistinctSqlAggregator)
),
ImmutableSet.of(
new HllSketchSetUnionOperatorConversion(),
new HllSketchEstimateOperatorConversion(),
new HllSketchToStringOperatorConversion(),
new HllSketchEstimateWithErrorBoundsOperatorConversion()
)
);
}
@Test
public void testApproxCountDistinctHllSketch()
{

View File

@ -19,12 +19,11 @@
package org.apache.druid.query.aggregation.datasketches.quantiles.sql;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.inject.Injector;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.Druids;
@ -63,7 +62,6 @@ import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
@ -79,31 +77,18 @@ import java.util.Map;
public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
{
private static final DruidOperatorTable OPERATOR_TABLE = new DruidOperatorTable(
ImmutableSet.of(
new DoublesSketchApproxQuantileSqlAggregator(),
new DoublesSketchObjectSqlAggregator()
),
ImmutableSet.of(
new DoublesSketchQuantileOperatorConversion(),
new DoublesSketchQuantilesOperatorConversion(),
new DoublesSketchToHistogramOperatorConversion(),
new DoublesSketchRankOperatorConversion(),
new DoublesSketchCDFOperatorConversion(),
new DoublesSketchSummaryOperatorConversion()
)
);
@Override
public Iterable<? extends Module> getJacksonModules()
public void configureGuice(DruidInjectorBuilder builder)
{
return Iterables.concat(super.getJacksonModules(), new DoublesSketchModule().getJacksonModules());
super.configureGuice(builder);
builder.addModule(new DoublesSketchModule());
}
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
DoublesSketchModule.registerSerde();
@ -141,12 +126,6 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
);
}
@Override
public DruidOperatorTable createOperatorTable()
{
return OPERATOR_TABLE;
}
@Test
public void testQuantileOnFloatAndLongs()
{

View File

@ -19,17 +19,14 @@
package org.apache.druid.query.aggregation.datasketches.theta.sql;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.inject.Injector;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.ExpressionModule;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.Druids;
import org.apache.druid.query.QueryDataSource;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
@ -57,58 +54,48 @@ import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.aggregation.ApproxCountDistinctSqlAggregator;
import org.apache.druid.sql.calcite.aggregation.builtin.CountSqlAggregator;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.sql.guice.SqlModule;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.joda.time.DateTimeZone;
import org.joda.time.Period;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
{
private static final String DATA_SOURCE = "foo";
private ExprMacroTable macroTable;
@Before
public void setUp()
@Override
public void gatherProperties(Properties properties)
{
macroTable = createMacroTable();
super.gatherProperties(properties);
// Use APPROX_COUNT_DISTINCT_DS_THETA as APPROX_COUNT_DISTINCT impl for these tests.
properties.put(SqlModule.PROPERTY_SQL_APPROX_COUNT_DISTINCT_CHOICE, ThetaSketchApproxCountDistinctSqlAggregator.NAME);
}
@Override
public Iterable<? extends Module> getJacksonModules()
public void configureGuice(DruidInjectorBuilder builder)
{
return Iterables.concat(super.getJacksonModules(), new SketchModule().getJacksonModules());
}
@Override
public ExprMacroTable createMacroTable()
{
final List<ExprMacroTable.ExprMacro> exprMacros = new ArrayList<>();
for (Class<? extends ExprMacroTable.ExprMacro> clazz : ExpressionModule.EXPR_MACROS) {
exprMacros.add(CalciteTests.INJECTOR.getInstance(clazz));
}
return new ExprMacroTable(exprMacros);
super.configureGuice(builder);
builder.addModule(new SketchModule());
}
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
SketchModule.registerSerde();
@ -148,31 +135,6 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
);
}
@Override
public DruidOperatorTable createOperatorTable()
{
final ThetaSketchApproxCountDistinctSqlAggregator approxCountDistinctSqlAggregator =
new ThetaSketchApproxCountDistinctSqlAggregator();
return new DruidOperatorTable(
ImmutableSet.of(
new ThetaSketchApproxCountDistinctSqlAggregator(),
new ThetaSketchObjectSqlAggregator(),
// Use APPROX_COUNT_DISTINCT_DS_THETA as APPROX_COUNT_DISTINCT impl for these tests.
new CountSqlAggregator(new ApproxCountDistinctSqlAggregator(approxCountDistinctSqlAggregator)),
new ApproxCountDistinctSqlAggregator(approxCountDistinctSqlAggregator)
),
ImmutableSet.of(
new ThetaSketchEstimateOperatorConversion(),
new ThetaSketchEstimateWithErrorBoundsOperatorConversion(),
new ThetaSketchSetIntersectOperatorConversion(),
new ThetaSketchSetUnionOperatorConversion(),
new ThetaSketchSetNotOperatorConversion()
)
);
}
@Test
public void testApproxCountDistinctThetaSketch()
{
@ -235,13 +197,13 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
"v0",
"substring(\"dim2\", 0, 1)",
ColumnType.STRING,
macroTable
queryFramework().macroTable()
),
new ExpressionVirtualColumn(
"v1",
"concat(substring(\"dim2\", 0, 1),'x')",
ColumnType.STRING,
macroTable
queryFramework().macroTable()
)
)
.aggregators(
@ -444,7 +406,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
"v0",
"concat(\"dim2\",'hello')",
ColumnType.STRING,
macroTable
queryFramework().macroTable()
)
)
.aggregators(
@ -621,7 +583,7 @@ public class ThetaSketchSqlAggregatorTest extends BaseCalciteQueryTest
"v0",
"concat(\"dim2\",'hello')",
ColumnType.STRING,
macroTable
queryFramework().macroTable()
)
)
.aggregators(

View File

@ -19,10 +19,9 @@
package org.apache.druid.query.aggregation.bloom.sql;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.inject.Injector;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.DimensionSchema;
@ -35,6 +34,7 @@ import org.apache.druid.data.input.impl.MapInputRowParser;
import org.apache.druid.data.input.impl.TimeAndDimsParseSpec;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.guice.BloomFilterExtensionModule;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
@ -59,7 +59,6 @@ import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
@ -76,21 +75,18 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
private static final String DATA_SOURCE = "numfoo";
private static final DruidOperatorTable OPERATOR_TABLE = new DruidOperatorTable(
ImmutableSet.of(new BloomFilterSqlAggregator()),
ImmutableSet.of()
);
@Override
public Iterable<? extends Module> getJacksonModules()
public void configureGuice(DruidInjectorBuilder builder)
{
return Iterables.concat(super.getJacksonModules(), new BloomFilterExtensionModule().getJacksonModules());
super.configureGuice(builder);
builder.addModule(new BloomFilterExtensionModule());
}
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
InputRowParser parser = new MapInputRowParser(
@ -135,12 +131,6 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
);
}
@Override
public DruidOperatorTable createOperatorTable()
{
return OPERATOR_TABLE;
}
@Test
public void testBloomFilterAgg() throws Exception
{
@ -178,7 +168,7 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
new Object[]{CalciteTests.getJsonMapper().writeValueAsString(expected1)}
new Object[]{queryFramework().queryJsonMapper().writeValueAsString(expected1)}
)
);
}
@ -211,6 +201,7 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
}
}
ObjectMapper jsonMapper = queryFramework().queryJsonMapper();
testQuery(
"SELECT\n"
+ "BLOOM_FILTER(dim1, 1000),\n"
@ -239,8 +230,8 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
),
ImmutableList.of(
new Object[] {
CalciteTests.getJsonMapper().writeValueAsString(expected1),
CalciteTests.getJsonMapper().writeValueAsString(expected2)
jsonMapper.writeValueAsString(expected1),
jsonMapper.writeValueAsString(expected2)
}
)
);
@ -288,10 +279,9 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
new Object[]{CalciteTests.getJsonMapper().writeValueAsString(expected1)}
new Object[]{queryFramework().queryJsonMapper().writeValueAsString(expected1)}
)
);
}
@Test
@ -335,7 +325,7 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
new Object[]{CalciteTests.getJsonMapper().writeValueAsString(expected3)}
new Object[]{queryFramework().queryJsonMapper().writeValueAsString(expected3)}
)
);
}
@ -388,7 +378,7 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
new Object[]{CalciteTests.getJsonMapper().writeValueAsString(expected1)}
new Object[]{queryFramework().queryJsonMapper().writeValueAsString(expected1)}
)
);
}
@ -442,7 +432,7 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
new Object[]{CalciteTests.getJsonMapper().writeValueAsString(expected1)}
new Object[]{queryFramework().queryJsonMapper().writeValueAsString(expected1)}
)
);
}
@ -496,7 +486,7 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
new Object[]{CalciteTests.getJsonMapper().writeValueAsString(expected1)}
new Object[]{queryFramework().queryJsonMapper().writeValueAsString(expected1)}
)
);
}
@ -510,6 +500,7 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
BloomKFilter expected1 = new BloomKFilter(TEST_NUM_ENTRIES);
BloomKFilter expected2 = new BloomKFilter(TEST_NUM_ENTRIES);
ObjectMapper jsonMapper = queryFramework().queryJsonMapper();
testQuery(
"SELECT\n"
+ "BLOOM_FILTER(dim1, 1000),\n"
@ -540,8 +531,8 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
),
ImmutableList.of(
new Object[] {
CalciteTests.getJsonMapper().writeValueAsString(expected1),
CalciteTests.getJsonMapper().writeValueAsString(expected2)
jsonMapper.writeValueAsString(expected1),
jsonMapper.writeValueAsString(expected2)
}
)
);
@ -556,6 +547,7 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
BloomKFilter expected1 = new BloomKFilter(TEST_NUM_ENTRIES);
BloomKFilter expected2 = new BloomKFilter(TEST_NUM_ENTRIES);
ObjectMapper jsonMapper = queryFramework().queryJsonMapper();
testQuery(
"SELECT\n"
+ "dim2,\n"
@ -596,8 +588,8 @@ public class BloomFilterSqlAggregatorTest extends BaseCalciteQueryTest
ImmutableList.of(
new Object[] {
"a",
CalciteTests.getJsonMapper().writeValueAsString(expected1),
CalciteTests.getJsonMapper().writeValueAsString(expected2)
jsonMapper.writeValueAsString(expected1),
jsonMapper.writeValueAsString(expected2)
}
)
);

View File

@ -19,22 +19,16 @@
package org.apache.druid.query.filter.sql;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import org.apache.calcite.avatica.SqlType;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.BloomFilterExtensionModule;
import org.apache.druid.guice.BloomFilterSerializersModule;
import org.apache.druid.guice.ExpressionModule;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.Druids;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.expression.LookupExprMacro;
import org.apache.druid.query.expressions.BloomFilterExpressions;
import org.apache.druid.query.filter.BloomDimFilter;
import org.apache.druid.query.filter.BloomKFilter;
import org.apache.druid.query.filter.BloomKFilterHolder;
@ -42,50 +36,21 @@ import org.apache.druid.query.filter.ExpressionDimFilter;
import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.aggregation.ApproxCountDistinctSqlAggregator;
import org.apache.druid.sql.calcite.aggregation.builtin.BuiltinApproxCountDistinctSqlAggregator;
import org.apache.druid.sql.calcite.aggregation.builtin.CountSqlAggregator;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.http.SqlParameter;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
{
@Override
public DruidOperatorTable createOperatorTable()
public void configureGuice(DruidInjectorBuilder builder)
{
CalciteTests.getJsonMapper().registerModule(new BloomFilterSerializersModule());
return new DruidOperatorTable(
ImmutableSet.of(
new CountSqlAggregator(new ApproxCountDistinctSqlAggregator(new BuiltinApproxCountDistinctSqlAggregator()))
),
ImmutableSet.of(new BloomFilterOperatorConversion())
);
}
@Override
public ExprMacroTable createMacroTable()
{
final List<ExprMacroTable.ExprMacro> exprMacros = new ArrayList<>();
for (Class<? extends ExprMacroTable.ExprMacro> clazz : ExpressionModule.EXPR_MACROS) {
exprMacros.add(CalciteTests.INJECTOR.getInstance(clazz));
}
exprMacros.add(CalciteTests.INJECTOR.getInstance(LookupExprMacro.class));
exprMacros.add(new BloomFilterExpressions.TestExprMacro());
return new ExprMacroTable(exprMacros);
}
@Override
public Iterable<? extends Module> getJacksonModules()
{
return Iterables.concat(super.getJacksonModules(), new BloomFilterExtensionModule().getJacksonModules());
super.configureGuice(builder);
builder.addModule(new BloomFilterExtensionModule());
}
@Test
@ -144,7 +109,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
base64
),
null,
createMacroTable()
queryFramework().macroTable()
)
)
.aggregators(aggregators(new CountAggregatorFactory("a0")))

View File

@ -19,12 +19,11 @@
package org.apache.druid.query.aggregation.histogram.sql;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.inject.Injector;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.Druids;
import org.apache.druid.query.QueryContexts;
@ -55,7 +54,6 @@ import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
@ -68,21 +66,18 @@ import java.util.List;
public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQueryTest
{
private static final DruidOperatorTable OPERATOR_TABLE = new DruidOperatorTable(
ImmutableSet.of(new QuantileSqlAggregator(), new FixedBucketsHistogramQuantileSqlAggregator()),
ImmutableSet.of()
);
@Override
public Iterable<? extends Module> getJacksonModules()
public void configureGuice(DruidInjectorBuilder builder)
{
return Iterables.concat(super.getJacksonModules(), new ApproximateHistogramDruidModule().getJacksonModules());
super.configureGuice(builder);
builder.addModule(new ApproximateHistogramDruidModule());
}
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
ApproximateHistogramDruidModule.registerSerde();
@ -123,13 +118,6 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ
);
}
@Override
public DruidOperatorTable createOperatorTable()
{
return OPERATOR_TABLE;
}
@Test
public void testQuantileOnFloatAndLongs()
{
@ -568,7 +556,6 @@ public class FixedBucketsHistogramQuantileSqlAggregatorTest extends BaseCalciteQ
);
}
@Test
public void testGroupByAggregatorDefaultValues()
{

View File

@ -19,11 +19,10 @@
package org.apache.druid.query.aggregation.histogram.sql;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.inject.Injector;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.Druids;
@ -54,7 +53,6 @@ import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
@ -67,21 +65,18 @@ import java.util.List;
public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
{
private static final DruidOperatorTable OPERATOR_TABLE = new DruidOperatorTable(
ImmutableSet.of(new QuantileSqlAggregator()),
ImmutableSet.of()
);
@Override
public Iterable<? extends Module> getJacksonModules()
public void configureGuice(DruidInjectorBuilder builder)
{
return Iterables.concat(super.getJacksonModules(), new ApproximateHistogramDruidModule().getJacksonModules());
super.configureGuice(builder);
builder.addModule(new ApproximateHistogramDruidModule());
}
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
ApproximateHistogramDruidModule.registerSerde();
@ -122,12 +117,6 @@ public class QuantileSqlAggregatorTest extends BaseCalciteQueryTest
);
}
@Override
public DruidOperatorTable createOperatorTable()
{
return OPERATOR_TABLE;
}
@Test
public void testQuantileOnFloatAndLongs()
{

View File

@ -21,7 +21,6 @@ package org.apache.druid.msq.guice;
import com.fasterxml.jackson.databind.Module;
import com.google.inject.Binder;
import com.google.inject.Inject;
import com.google.inject.Provides;
import org.apache.druid.discovery.NodeRole;
import org.apache.druid.guice.LazySingleton;
@ -36,7 +35,6 @@ import org.apache.druid.sql.calcite.external.ExternalOperatorConversion;
import org.apache.druid.sql.guice.SqlBindings;
import java.util.List;
import java.util.Properties;
/**
* Module for providing the {@code EXTERN} operator.
@ -44,9 +42,6 @@ import java.util.Properties;
@LoadScope(roles = NodeRole.BROKER_JSON_NAME)
public class MSQSqlModule implements DruidModule
{
@Inject
Properties properties = null;
@Override
public List<? extends Module> getJacksonModules()
{
@ -66,7 +61,6 @@ public class MSQSqlModule implements DruidModule
SqlBindings.addOperatorConversion(binder, ExternalOperatorConversion.class);
}
@Provides
@MSQ
@LazySingleton

View File

@ -55,7 +55,6 @@ import java.util.Map;
*/
public class MSQWarningsTest extends MSQTestBase
{
private File toRead;
private RowSignature rowSignature;
private String toReadFileNameAsJson;

View File

@ -28,6 +28,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
@ -40,6 +41,7 @@ import org.apache.druid.data.input.impl.LongDimensionSchema;
import org.apache.druid.data.input.impl.StringDimensionSchema;
import org.apache.druid.discovery.NodeRole;
import org.apache.druid.frame.testutil.FrameTestUtil;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.guice.DruidSecondaryModule;
import org.apache.druid.guice.GuiceInjectors;
import org.apache.druid.guice.IndexingServiceTuningConfigModule;
@ -55,6 +57,7 @@ import org.apache.druid.indexing.common.task.CompactionTask;
import org.apache.druid.indexing.common.task.IndexTask;
import org.apache.druid.indexing.common.task.batch.parallel.ParallelIndexTuningConfig;
import org.apache.druid.initialization.CoreInjectorBuilder;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.StringUtils;
@ -65,6 +68,7 @@ import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.metadata.input.InputSourceModule;
import org.apache.druid.msq.exec.Controller;
import org.apache.druid.msq.exec.WorkerMemoryParameters;
import org.apache.druid.msq.guice.MSQDurableStorageModule;
@ -129,6 +133,7 @@ import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.SqlToolbox;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.external.ExternalDataSource;
import org.apache.druid.sql.calcite.external.ExternalOperatorConversion;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
@ -141,6 +146,7 @@ import org.apache.druid.sql.calcite.util.QueryFrameworkUtils;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.SqlTestFramework;
import org.apache.druid.sql.calcite.view.InProcessViewManager;
import org.apache.druid.sql.guice.SqlBindings;
import org.apache.druid.storage.StorageConnector;
import org.apache.druid.storage.StorageConnectorProvider;
import org.apache.druid.storage.local.LocalFileStorageConnector;
@ -162,6 +168,7 @@ import org.mockito.Mockito;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
@ -243,12 +250,54 @@ public class MSQTestBase extends BaseCalciteQueryTest
)
);
@Override
public void configureGuice(DruidInjectorBuilder builder)
{
super.configureGuice(builder);
builder.addModule(new DruidModule() {
// Small subset of MSQSqlModule
@Override
public void configure(Binder binder)
{
// We want this module to bring InputSourceModule along for the ride.
binder.install(new InputSourceModule());
SqlBindings.addOperatorConversion(binder, ExternalOperatorConversion.class);
}
@Override
public List<? extends com.fasterxml.jackson.databind.Module> getJacksonModules()
{
// We want this module to bring input sources along for the ride.
return new InputSourceModule().getJacksonModules();
}
});
}
@After
public void tearDown2()
{
groupByBuffers.close();
}
// This test is a Frankenstein creation: it uses the injector set up by the
// SqlTestFramework to pull items from it that are then used to create another
// injector that has the MSQ dependencies. This allows the test to create a
// "shadow" statement factory that is used for tests. It works... kinda.
//
// Better would be to sort through the Guice stuff and move it into the
// configureGuice() method above: use the SQL test framework injector so
// that everything is coordinated. Use the planner factory provided by that
// framework.
//
// Leaving well enough alone for now because any change should be done by
// someone familiar with the rather complex setup code below.
//
// One brute-force attempt ran afoul of circular dependencies: the SQL engine
// is created in the main injector, but it depends on the SegmentCacheManagerFactory
// which depends on the object mapper that the injector will provide, once it
// is built, but has not yet been built while we build the SQL engine.
@Before
public void setUp2()
{
@ -291,7 +340,6 @@ public class MSQTestBase extends BaseCalciteQueryTest
GroupByQueryConfig groupByQueryConfig = new GroupByQueryConfig();
binder.bind(DruidProcessingConfig.class).toInstance(druidProcessingConfig);
binder.bind(new TypeLiteral<Set<NodeRole>>()
{
@ -411,8 +459,8 @@ public class MSQTestBase extends BaseCalciteQueryTest
PlannerFactory plannerFactory = new PlannerFactory(
rootSchema,
CalciteTests.createOperatorTable(),
CalciteTests.createExprMacroTable(),
qf.operatorTable(),
qf.macroTable(),
PLANNER_CONFIG_DEFAULT,
AuthTestUtils.TEST_AUTHORIZER_MAPPER,
objectMapper,

View File

@ -115,5 +115,4 @@ public class MSQTestOverlordServiceClient extends NoopOverlordClient
{
return msqSpec.get(id);
}
}

View File

@ -19,10 +19,8 @@
package org.apache.druid.query.aggregation.variance.sql;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.inject.Injector;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.DimensionSchema;
@ -30,6 +28,7 @@ import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.DoubleDimensionSchema;
import org.apache.druid.data.input.impl.FloatDimensionSchema;
import org.apache.druid.data.input.impl.LongDimensionSchema;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.Druids;
@ -56,7 +55,6 @@ import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
@ -70,28 +68,18 @@ import java.util.List;
public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
{
private static final DruidOperatorTable OPERATOR_TABLE = new DruidOperatorTable(
ImmutableSet.of(
new BaseVarianceSqlAggregator.VarPopSqlAggregator(),
new BaseVarianceSqlAggregator.VarSampSqlAggregator(),
new BaseVarianceSqlAggregator.VarianceSqlAggregator(),
new BaseVarianceSqlAggregator.StdDevPopSqlAggregator(),
new BaseVarianceSqlAggregator.StdDevSampSqlAggregator(),
new BaseVarianceSqlAggregator.StdDevSqlAggregator()
),
ImmutableSet.of()
);
@Override
public Iterable<? extends Module> getJacksonModules()
public void configureGuice(DruidInjectorBuilder builder)
{
return Iterables.concat(super.getJacksonModules(), new DruidStatsModule().getJacksonModules());
super.configureGuice(builder);
builder.addModule(new DruidStatsModule());
}
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
final QueryableIndex index =
@ -132,12 +120,6 @@ public class VarianceSqlAggregatorTest extends BaseCalciteQueryTest
);
}
@Override
public DruidOperatorTable createOperatorTable()
{
return OPERATOR_TABLE;
}
public void addToHolder(VarianceAggregatorCollector holder, Object raw)
{
addToHolder(holder, raw, 1);

View File

@ -20,15 +20,11 @@
package org.apache.druid.query.sql;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.ExpressionModule;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.math.expr.ExprMacroTable.ExprMacro;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.guice.SleepModule;
import org.apache.druid.query.Druids;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.expression.LookupExprMacro;
import org.apache.druid.query.expressions.SleepExprMacro;
import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.query.scan.ScanQuery.ResultFormat;
@ -36,34 +32,15 @@ import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
public class SleepSqlTest extends BaseCalciteQueryTest
{
@Override
public DruidOperatorTable createOperatorTable()
public void configureGuice(DruidInjectorBuilder builder)
{
return new DruidOperatorTable(
ImmutableSet.of(),
ImmutableSet.of(new SleepOperatorConversion())
);
}
@Override
public ExprMacroTable createMacroTable()
{
final List<ExprMacro> exprMacros = new ArrayList<>();
for (Class<? extends ExprMacroTable.ExprMacro> clazz : ExpressionModule.EXPR_MACROS) {
exprMacros.add(CalciteTests.INJECTOR.getInstance(clazz));
}
exprMacros.add(CalciteTests.INJECTOR.getInstance(LookupExprMacro.class));
exprMacros.add(new SleepExprMacro());
return new ExprMacroTable(exprMacros);
super.configureGuice(builder);
builder.addModule(new SleepModule());
}
@Test
@ -80,7 +57,7 @@ public class SleepSqlTest extends BaseCalciteQueryTest
"v0",
"sleep(\"m1\")",
ColumnType.STRING,
createMacroTable()
queryFramework().macroTable()
)
)
.columns("v0")

View File

@ -28,6 +28,8 @@ import java.util.LinkedHashSet;
*/
public class ExtensionsConfig
{
public static final String PROPERTY_BASE = "druid.extensions";
@JsonProperty
@NotNull
private boolean searchCurrentClassloader = true;

View File

@ -55,6 +55,7 @@ import java.util.stream.Collectors;
* any kind of extension that may be needed in the future.
* The extensions are cached so that they can be reported by various REST APIs.
*/
@LazySingleton
public class ExtensionsLoader
{
private static final Logger log = new Logger(ExtensionsLoader.class);

View File

@ -35,8 +35,8 @@ public class ExtensionsModule implements Module
public void configure(Binder binder)
{
binder.bind(ExtensionsLoader.class).in(LazySingleton.class);
JsonConfigProvider.bind(binder, "druid.extensions", ExtensionsConfig.class);
JsonConfigProvider.bind(binder, "druid.modules", ModulesConfig.class);
JsonConfigProvider.bind(binder, ExtensionsConfig.PROPERTY_BASE, ExtensionsConfig.class);
JsonConfigProvider.bind(binder, ModulesConfig.PROPERTY_BASE, ModulesConfig.class);
}
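
In properties form the bindings are unchanged; the constants just give the prefixes a single home. Placeholder values, with the keys as exercised by the new StartupInjectorBuilderTest below:

druid.extensions.directory=/opt/druid/extensions
druid.modules.excludeList=["org.example.ExcludedModule"]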
/**

View File

@ -26,6 +26,8 @@ import java.util.List;
public class ModulesConfig
{
public static final String PROPERTY_BASE = "druid.modules";
/**
* Canonical class names of modules that should not be loaded even though they are found in extensions from {@link
* ExtensionsConfig#loadList} or the standard list of modules loaded by some node type, e.g. {@code

View File

@ -19,8 +19,10 @@
package org.apache.druid.guice;
import com.google.inject.util.Providers;
import org.apache.druid.jackson.JacksonModule;
import org.apache.druid.math.expr.ExpressionProcessingModule;
import org.apache.druid.utils.RuntimeInfo;
import java.util.Arrays;
import java.util.Properties;
@ -80,4 +82,19 @@ public class StartupInjectorBuilder extends BaseInjectorBuilder<StartupInjectorB
);
return this;
}
/**
* Configure the injector to not load server-only classes by binding those
* classes to providers of null values. Avoids accidental dependencies of
* test code on classes not intended for tests by preventing Guice from
* helpfully providing implicit instances.
*/
public StartupInjectorBuilder forTests()
{
add(binder -> {
binder.bind(ExtensionsLoader.class).toProvider(Providers.of(null));
binder.bind(RuntimeInfo.class).toProvider(Providers.of(null));
});
return this;
}
}
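
A sketch of the intended test usage, matching the new unit test later in this commit:

// Server-only classes (RuntimeInfo, ExtensionsLoader) are bound to null
// providers, so a test fails fast if it accidentally depends on them.
Injector injector = new StartupInjectorBuilder()
    .forTests()
    .build();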

View File

@ -42,6 +42,8 @@ import java.io.IOException;
*/
public class DefaultObjectMapper extends ObjectMapper
{
public static final DefaultObjectMapper INSTANCE = new DefaultObjectMapper();
public DefaultObjectMapper()
{
this((JsonFactory) null, null);

View File

@ -0,0 +1,138 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.guice;
import com.google.inject.Injector;
import org.apache.druid.utils.RuntimeInfo;
import org.junit.Test;
import java.util.Collections;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
public class StartupInjectorBuilderTest
{
@Test
public void testEmpty()
{
Injector injector = new StartupInjectorBuilder().build();
// Empty properties come along for free
Properties props = injector.getInstance(Properties.class);
assertNotNull(props);
assertTrue(props.isEmpty());
// Since we didn't configure this item, we get a new instance every time.
assertNotSame(props, injector.getInstance(Properties.class));
// Runtime info is available, though not configured, because Guice can create
// one when requested. Our class, so marked singleton.
assertNotNull(injector.getInstance(RuntimeInfo.class));
assertSame(injector.getInstance(RuntimeInfo.class), injector.getInstance(RuntimeInfo.class));
// The extension loader is available, again via implicit creation.
// Since it is our class, we marked it as a lazy singleton.
assertNotNull(injector.getInstance(ExtensionsLoader.class));
assertSame(injector.getInstance(ExtensionsLoader.class), injector.getInstance(ExtensionsLoader.class));
// Does have the basics. Sample one such entry.
assertNotNull(injector.getInstance(DruidSecondaryModule.class));
assertSame(injector.getInstance(DruidSecondaryModule.class), injector.getInstance(DruidSecondaryModule.class));
}
@Test
public void testEmptyTestInjector()
{
Injector injector = new StartupInjectorBuilder().forTests().build();
// Empty properties come along for free
Properties props = injector.getInstance(Properties.class);
assertNotNull(props);
assertTrue(props.isEmpty());
// Since we didn't configure this item, we get a new instance every time.
assertNotSame(props, injector.getInstance(Properties.class));
// Runtime info bound to null.
assertNull(injector.getInstance(RuntimeInfo.class));
// The extension loader bound to null.
assertNull(injector.getInstance(ExtensionsLoader.class));
// Does have the basics. Sample one such entry.
assertNotNull(injector.getInstance(DruidSecondaryModule.class));
assertSame(injector.getInstance(DruidSecondaryModule.class), injector.getInstance(DruidSecondaryModule.class));
}
@Test
public void testEmptyProperties()
{
Injector injector = new StartupInjectorBuilder()
.withEmptyProperties()
.build();
// Single empty properties instance
Properties props = injector.getInstance(Properties.class);
assertNotNull(props);
assertTrue(props.isEmpty());
// Since we explicitly bound an empty Properties instance, we get the same one every time.
assertSame(props, injector.getInstance(Properties.class));
}
@Test
public void testExplicitProperties()
{
Properties props = new Properties();
props.put("foo", "bar");
Injector injector = new StartupInjectorBuilder()
.forTests()
.withProperties(props)
.build();
// Returns explicit properties
Properties propsInstance = injector.getInstance(Properties.class);
assertSame(props, propsInstance);
}
@Test
public void testExtensionsOption()
{
Properties props = new Properties();
props.put(ExtensionsConfig.PROPERTY_BASE + ".directory", "bogus");
props.put(ModulesConfig.PROPERTY_BASE + ".excludeList", "[\"excluded\"]");
Injector injector = new StartupInjectorBuilder()
.withExtensions()
.withProperties(props)
.build();
// Extensions config is populated. (Can't test the extensions themselves.)
assertEquals("bogus", injector.getInstance(ExtensionsConfig.class).getDirectory());
assertEquals(Collections.singletonList("excluded"), injector.getInstance(ModulesConfig.class).getExcludeList());
}
// Can't test the server option here: there are no actual property files to read.
}

View File

@ -28,6 +28,8 @@ import org.apache.druid.catalog.model.table.InlineTableDefn;
import org.apache.druid.catalog.model.table.LocalTableDefn;
import org.apache.druid.java.util.common.IAE;
import javax.inject.Inject;
import java.util.Map;
/**
@ -75,6 +77,7 @@ public class TableDefnRegistry
this.jsonMapper = jsonMapper;
}
@Inject
public TableDefnRegistry(
final ObjectMapper jsonMapper
)

View File

@ -61,6 +61,7 @@ public class DruidInjectorBuilder
private final ObjectMapper smileMapper;
private final Set<NodeRole> nodeRoles;
private final ModulesConfig modulesConfig;
private boolean ignoreLoadScopes;
public DruidInjectorBuilder(final Injector baseInjector)
{
@ -83,6 +84,18 @@ public class DruidInjectorBuilder
this.modulesConfig = from.modulesConfig;
this.jsonMapper = from.jsonMapper;
this.smileMapper = from.smileMapper;
this.ignoreLoadScopes = from.ignoreLoadScopes;
}
/**
* Ignore load scope annotations on modules. Primarily for testing, where a unit
* test does not run as any particular Druid node, yet may wish to load a module
* that is annotated with a load scope.
*/
public DruidInjectorBuilder ignoreLoadScopes()
{
this.ignoreLoadScopes = true;
return this;
}
/**
@ -127,7 +140,7 @@ public class DruidInjectorBuilder
} else if (input instanceof Class) {
return addClass((Class<?>) input);
} else {
throw new ISE("Unknown module type[%s]", input.getClass());
throw new ISE("Unknown module type [%s]", input.getClass());
}
}
@ -176,6 +189,12 @@ public class DruidInjectorBuilder
log.info("Not loading module [%s] because it is present in excludeList", moduleClassName);
return false;
}
// Tests don't have node roles, and so want to load the given modules
// regardless of the node roles provided.
if (ignoreLoadScopes) {
return true;
}
LoadScope loadScope = moduleClass.getAnnotation(LoadScope.class);
if (loadScope == null) {
// always load if annotation is not specified
@ -204,4 +223,9 @@ public class DruidInjectorBuilder
{
return Guice.createInjector(modules);
}
public Injector baseInjector()
{
return baseInjector;
}
}

View File

@ -0,0 +1,308 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.guice;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Key;
import org.apache.druid.discovery.NodeRole;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.guice.annotations.LoadScope;
import org.apache.druid.initialization.CoreInjectorBuilder;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.java.util.common.ISE;
import org.junit.Test;
import javax.inject.Inject;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
public class DruidInjectorBuilderTest
{
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
public static class MockObject
{
}
@JsonTypeName("extn")
public static class MockObjectExtension extends MockObject
{
}
public interface MockInterface
{
}
public static class MockComponent implements MockInterface
{
}
private static class MockGuiceModule implements com.google.inject.Module
{
@Inject
public Properties properties;
@Override
public void configure(Binder binder)
{
binder.bind(MockInterface.class).to(MockComponent.class).in(LazySingleton.class);
}
}
private static class MockDruidModule implements DruidModule
{
@Inject
public Properties properties;
@Override
public void configure(Binder binder)
{
}
@Override
public List<? extends Module> getJacksonModules()
{
return ImmutableList.<Module>of(
new SimpleModule("MockModule").registerSubtypes(MockObjectExtension.class)
);
}
}
@LoadScope(roles = NodeRole.BROKER_JSON_NAME)
private static class MockRoleModule extends MockDruidModule
{
}
@Test
public void testEmpty()
{
Properties props = new Properties();
props.put("foo", "bar");
Injector injector = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.forTests()
.withProperties(props)
.build()
).build();
// Returns explicit properties
Properties propsInstance = injector.getInstance(Properties.class);
assertSame(props, propsInstance);
}
/**
* Test the most generic form: addInput. Calls addModule() internally.
*/
@Test
public void testAddInputModules() throws IOException
{
Properties props = new Properties();
props.put("foo", "bar");
MockGuiceModule guiceModule = new MockGuiceModule();
MockDruidModule druidModule = new MockDruidModule();
Injector injector = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.forTests()
.withProperties(props)
.build()
)
.addInput(guiceModule)
.addInput(druidModule)
.build();
// Verify injection occurred
assertSame(props, guiceModule.properties);
assertSame(props, druidModule.properties);
verifyInjector(injector);
}
private void verifyInjector(Injector injector) throws IOException
{
// Guice module did its thing
assertTrue(injector.getInstance(MockInterface.class) instanceof MockComponent);
// And that the Druid module set up Jackson.
String json = "{\"type\": \"extn\"}";
ObjectMapper om = injector.getInstance(Key.get(ObjectMapper.class, Json.class));
MockObject obj = om.readValue(json, MockObject.class);
assertTrue(obj instanceof MockObjectExtension);
}
/**
* Test the ability to pass module classes rather than instances.
*/
@Test
public void testAddInputClasses() throws IOException
{
Properties props = new Properties();
props.put("foo", "bar");
Injector injector = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.forTests()
.withProperties(props)
.build()
)
.addInput(MockGuiceModule.class)
.addInput(MockDruidModule.class)
.build();
// Can't verify injection here, sadly
verifyInjector(injector);
}
@Test
public void testBadModule()
{
DruidInjectorBuilder builder = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.forTests()
.withEmptyProperties()
.build()
);
assertThrows(ISE.class, () -> builder.addInput("I'm not a module"));
}
@Test
public void testBadModuleClass()
{
DruidInjectorBuilder builder = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.forTests()
.withEmptyProperties()
.build()
);
assertThrows(ISE.class, () -> builder.addInput(Object.class));
}
@Test
public void testAddModules() throws IOException
{
Injector injector = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.forTests()
.withEmptyProperties()
.build()
)
.addModules(new MockGuiceModule(), new MockDruidModule())
.build();
verifyInjector(injector);
}
@Test
public void testAddAll() throws IOException
{
Injector injector = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.forTests()
.withEmptyProperties()
.build()
)
.addAll(Arrays.asList(new MockGuiceModule(), new MockDruidModule()))
.build();
verifyInjector(injector);
}
/**
* Enable extensions. Then, exclude our JSON test module. As a result, the
* JSON object will fail to deserialize.
*/
@Test
public void testExclude()
{
Properties props = new Properties();
props.put(ModulesConfig.PROPERTY_BASE + ".excludeList", "[\"" + MockDruidModule.class.getName() + "\"]");
Injector injector = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.withExtensions()
.withProperties(props)
.build()
)
.addInput(MockGuiceModule.class)
.addInput(MockDruidModule.class)
.build();
assertThrows(IOException.class, () -> verifyInjector(injector));
}
@Test
public void testMatchingNodeRole() throws IOException
{
Injector injector = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.forTests()
.withEmptyProperties()
.build(),
ImmutableSet.of(NodeRole.BROKER)
)
.addModules(new MockGuiceModule(), new MockRoleModule())
.build();
verifyInjector(injector);
}
@Test
public void testNotMatchingNodeRole()
{
Injector injector = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.forTests()
.withEmptyProperties()
.build(),
ImmutableSet.of(NodeRole.COORDINATOR)
)
.addModules(new MockGuiceModule(), new MockRoleModule())
.build();
assertThrows(IOException.class, () -> verifyInjector(injector));
}
@Test
public void testIgnoreNodeRole() throws IOException
{
Injector injector = new CoreInjectorBuilder(
new StartupInjectorBuilder()
.forTests()
.withEmptyProperties()
.build(),
ImmutableSet.of(NodeRole.COORDINATOR)
)
.ignoreLoadScopes()
.addModules(new MockGuiceModule(), new MockRoleModule())
.build();
verifyInjector(injector);
}
}

View File

@ -22,9 +22,9 @@ package org.apache.druid.server;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.inject.Guice;
import com.google.inject.Injector;
import org.apache.druid.guice.PropertiesModule;
import org.apache.druid.guice.StartupInjectorBuilder;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.initialization.ServerInjectorBuilderTest;
import org.apache.druid.java.util.common.StringUtils;
@ -78,8 +78,9 @@ public class StatusResourceTest
private void testHiddenPropertiesWithPropertyFileName(String fileName) throws Exception
{
Injector injector = Guice.createInjector(Collections.singletonList(new PropertiesModule(Collections.singletonList(
fileName))));
Injector injector = new StartupInjectorBuilder()
.add(new PropertiesModule(Collections.singletonList(fileName)))
.build();
Map<String, String> returnedProperties = injector.getInstance(StatusResource.class).getProperties();
Set<String> lowerCasePropertyNames = returnedProperties.keySet()
.stream()

View File

@ -32,6 +32,7 @@ import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.server.security.ResourceType;
import org.apache.druid.sql.calcite.planner.DruidPlanner;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerHook;
import java.io.Closeable;
import java.util.HashMap;
@ -71,6 +72,7 @@ public abstract class AbstractStatement implements Closeable
protected final Map<String, Object> queryContext;
protected PlannerContext plannerContext;
protected DruidPlanner.AuthResult authResult;
protected PlannerHook hook;
public AbstractStatement(
final SqlToolbox sqlToolbox,
@ -109,6 +111,15 @@ public abstract class AbstractStatement implements Closeable
return queryContext;
}
/**
* Set the hook used to capture planner artifacts during planning. Primarily
* used for testing. Defaults to a no-op hook.
*/
public void setHook(PlannerHook hook)
{
this.hook = hook;
}
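A hedged sketch of the intended test usage (stmtFactory and queryPlus stand in for a configured SqlStatementFactory and SqlQueryPlus; they are not defined in this patch):
PlannerCaptureHook hook = new PlannerCaptureHook();
DirectStatement stmt = stmtFactory.directStatement(queryPlus);
stmt.setHook(hook);
stmt.execute().getResults().toList();
RelRoot logicalPlan = hook.relRoot(); // captured while the query was planned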
/**
* Validate SQL query and authorize against any datasources or views which
* will take part in the query. Must be called by the API methods, not

View File

@ -206,7 +206,8 @@ public class DirectStatement extends AbstractStatement implements Cancelable
try (DruidPlanner planner = sqlToolbox.plannerFactory.createPlanner(
sqlToolbox.engine,
queryPlus.sql(),
queryContext
queryContext,
hook
)) {
validate(planner);
authorize(planner, authorizer());

View File

@ -66,9 +66,12 @@ public class PreparedStatement extends AbstractStatement
public PrepareResult prepare()
{
try (DruidPlanner planner = sqlToolbox.plannerFactory.createPlanner(
sqlToolbox.engine,
queryPlus.sql(),
queryContext)) {
sqlToolbox.engine,
queryPlus.sql(),
queryContext,
hook
)
) {
validate(planner);
authorize(planner, authorizer());

View File

@ -94,6 +94,7 @@ public class DruidPlanner implements Closeable
private final CalcitePlanner planner;
private final PlannerContext plannerContext;
private final SqlEngine engine;
private final PlannerHook hook;
private State state = State.START;
private SqlStatementHandler handler;
private boolean authorized;
@ -101,13 +102,15 @@ public class DruidPlanner implements Closeable
DruidPlanner(
final FrameworkConfig frameworkConfig,
final PlannerContext plannerContext,
final SqlEngine engine
final SqlEngine engine,
final PlannerHook hook
)
{
this.frameworkConfig = frameworkConfig;
this.planner = new CalcitePlanner(frameworkConfig);
this.plannerContext = plannerContext;
this.engine = engine;
this.hook = hook == null ? NoOpPlannerHook.INSTANCE : hook;
}
/**
@ -124,7 +127,9 @@ public class DruidPlanner implements Closeable
engine.validateContext(plannerContext.queryContextMap());
// Parse the query string.
SqlNode root = planner.parse(plannerContext.getSql());
String sql = plannerContext.getSql();
hook.captureSql(sql);
SqlNode root = planner.parse(sql);
handler = createHandler(root);
try {
@ -162,7 +167,6 @@ public class DruidPlanner implements Closeable
throw new ValidationException(StringUtils.format("Cannot execute [%s].", node.getKind()));
}
/**
* Prepare a SQL query for execution, including some initial parsing and
* validation and any dynamic parameter type resolution, to support prepared
@ -295,5 +299,11 @@ public class DruidPlanner implements Closeable
{
return plannerContext.getTimeZone();
}
@Override
public PlannerHook hook()
{
return hook;
}
}
}

View File

@ -74,6 +74,7 @@ public abstract class IngestHandler extends QueryHandler
{
super(handlerContext, queryNode, explain);
this.ingestionGranularity = ingestNode.getPartitionedBy();
handlerContext.hook().captureInsert(ingestNode);
}
protected static SqlNode convertQuery(DruidSqlIngest sqlNode) throws ValidationException

View File

@ -0,0 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.planner;
import org.apache.calcite.interpreter.BindableRel;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlInsert;
import org.apache.druid.sql.calcite.rel.DruidRel;
public class NoOpPlannerHook implements PlannerHook
{
public static final NoOpPlannerHook INSTANCE = new NoOpPlannerHook();
@Override
public void captureSql(String sql)
{
}
@Override
public void captureQueryRel(RelRoot rootQueryRel)
{
}
@Override
public void captureDruidRel(DruidRel<?> druidRel)
{
}
@Override
public void captureBindableRel(BindableRel bindableRel)
{
}
@Override
public void captureParameterTypes(RelDataType parameterTypes)
{
}
@Override
public void captureInsert(SqlInsert insert)
{
}
}

View File

@ -0,0 +1,78 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.planner;
import org.apache.calcite.interpreter.BindableRel;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlInsert;
import org.apache.druid.sql.calcite.rel.DruidRel;
public class PlannerCaptureHook implements PlannerHook
{
private RelRoot relRoot;
private SqlInsert insertNode;
@Override
public void captureSql(String sql)
{
// Not used at present. Add a field to capture this if you need it.
}
@Override
public void captureQueryRel(RelRoot rootQueryRel)
{
this.relRoot = rootQueryRel;
}
@Override
public void captureDruidRel(DruidRel<?> druidRel)
{
// Not used at present. Add a field to capture this if you need it.
}
@Override
public void captureBindableRel(BindableRel bindableRel)
{
// Not used at present. Add a field to capture this if you need it.
}
@Override
public void captureParameterTypes(RelDataType parameterTypes)
{
// Not used at present. Add a field to capture this if you need it.
}
@Override
public void captureInsert(SqlInsert insert)
{
this.insertNode = insert;
}
public RelRoot relRoot()
{
return relRoot;
}
public SqlInsert insertNode()
{
return insertNode;
}
}

View File

@ -104,7 +104,8 @@ public class PlannerFactory
public DruidPlanner createPlanner(
final SqlEngine engine,
final String sql,
final Map<String, Object> queryContext
final Map<String, Object> queryContext,
final PlannerHook hook
)
{
final PlannerContext context = PlannerContext.create(
@ -119,7 +120,7 @@ public class PlannerFactory
joinableFactoryWrapper
);
return new DruidPlanner(buildFrameworkConfig(context), context, engine);
return new DruidPlanner(buildFrameworkConfig(context), context, engine, hook);
}
/**
@ -129,7 +130,7 @@ public class PlannerFactory
@VisibleForTesting
public DruidPlanner createPlannerForTesting(final SqlEngine engine, final String sql, final Map<String, Object> queryContext)
{
final DruidPlanner thePlanner = createPlanner(engine, sql, queryContext);
final DruidPlanner thePlanner = createPlanner(engine, sql, queryContext, null);
thePlanner.getPlannerContext()
.setAuthenticationResult(NoopEscalator.getInstance().createEscalatedAuthenticationResult());
try {

View File

@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.planner;
import org.apache.calcite.interpreter.BindableRel;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlInsert;
import org.apache.druid.sql.calcite.rel.DruidRel;
/**
* Druid-specific version of Calcite's {@link org.apache.calcite.runtime.Hook Hook}
* class. Captures artifacts of interest in the Druid planning process, generally
* for test validation. Calcite's hook has multiple low-level events, but, sadly,
* none at the points where tests want to verify behavior, except for the opportunity to
* capture the native query.
*/
public interface PlannerHook
{
void captureSql(String sql);
void captureQueryRel(RelRoot rootQueryRel);
void captureDruidRel(DruidRel<?> druidRel);
void captureBindableRel(BindableRel bindableRel);
void captureParameterTypes(RelDataType parameterTypes);
void captureInsert(SqlInsert insert);
}

View File

@ -146,9 +146,11 @@ public abstract class QueryHandler extends SqlStatementHandler.BaseStatementHand
}
isPrepared = true;
rootQueryRel = handlerContext.planner().rel(validatedQueryNode);
handlerContext.hook().captureQueryRel(rootQueryRel);
final RelDataTypeFactory typeFactory = rootQueryRel.rel.getCluster().getTypeFactory();
final SqlValidator validator = handlerContext.planner().getValidator();
final RelDataType parameterTypes = validator.getParameterRowType(validatedQueryNode);
handlerContext.hook().captureParameterTypes(parameterTypes);
final RelDataType returnedRowType;
if (explain != null) {
@ -291,6 +293,7 @@ public abstract class QueryHandler extends SqlStatementHandler.BaseStatementHand
);
}
handlerContext.hook().captureBindableRel(bindableRel);
PlannerContext plannerContext = handlerContext.plannerContext();
if (explain != null) {
return planExplanation(bindableRel, false);
@ -466,6 +469,7 @@ public abstract class QueryHandler extends SqlStatementHandler.BaseStatementHand
protected PlannerResult planWithDruidConvention() throws ValidationException
{
final RelRoot possiblyLimitedRoot = possiblyWrapRootWithOuterLimitFromContext(rootQueryRel);
handlerContext.hook().captureQueryRel(possiblyLimitedRoot);
final QueryMaker queryMaker = buildQueryMaker(possiblyLimitedRoot);
PlannerContext plannerContext = handlerContext.plannerContext();
plannerContext.setQueryMaker(queryMaker);
@ -490,6 +494,7 @@ public abstract class QueryHandler extends SqlStatementHandler.BaseStatementHand
.plus(rootQueryRel.collation),
parameterized
);
handlerContext.hook().captureDruidRel(druidRel);
if (explain != null) {
return planExplanation(druidRel, true);

View File

@ -57,6 +57,7 @@ public interface SqlStatementHandler
SchemaPlus defaultSchema();
ObjectMapper jsonMapper();
DateTimeZone timeZone();
PlannerHook hook();
}
abstract class BaseStatementHandler implements SqlStatementHandler

View File

@ -61,7 +61,10 @@ public class DruidViewMacro implements TableMacro
plannerFactory.createPlanner(
ViewSqlEngine.INSTANCE,
viewSql,
Collections.emptyMap())) {
Collections.emptyMap(),
null
)
) {
planner.validate();
rowType = planner.prepare().getValidatedRowType();
}

View File

@ -20,11 +20,10 @@
package org.apache.druid.sql.calcite;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Injector;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.druid.annotations.UsedByJUnitParamsRunner;
import org.apache.druid.common.config.NullHandling;
@ -37,7 +36,6 @@ import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.DataSource;
import org.apache.druid.query.Druids;
import org.apache.druid.query.JoinDataSource;
@ -84,7 +82,6 @@ import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
@ -119,11 +116,11 @@ import org.junit.rules.TemporaryFolder;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
@ -488,7 +485,12 @@ public class BaseCalciteQueryTest extends CalciteTestBase
@Rule
public QueryLogHook getQueryLogHook()
{
return queryLogHook = QueryLogHook.create(queryFramework().queryJsonMapper());
// Indirection for the JSON mapper. Otherwise, this rule method is called
// before Setup is called, causing the query framework to be built before
// tests have done their setup. The indirection means we access the query
// framework only when we log the first query. By then, the query framework
// will have been created via the normal path.
return queryLogHook = new QueryLogHook(() -> queryFramework().queryJsonMapper());
}
public SqlTestFramework queryFramework()
@ -512,25 +514,31 @@ public class BaseCalciteQueryTest extends CalciteTestBase
resetFramework();
try {
baseComponentSupplier = new StandardComponentSupplier(
CalciteTests.INJECTOR,
temporaryFolder.newFolder());
temporaryFolder.newFolder()
);
}
catch (IOException e) {
throw new RE(e);
}
queryFramework = new SqlTestFramework.Builder(this)
SqlTestFramework.Builder builder = new SqlTestFramework.Builder(this)
.minTopNThreshold(minTopNThreshold)
.mergeBufferCount(mergeBufferCount)
.build();
.mergeBufferCount(mergeBufferCount);
configureBuilder(builder);
queryFramework = builder.build();
}
protected void configureBuilder(Builder builder)
{
}
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
return baseComponentSupplier.createQuerySegmentWalker(conglomerate, joinableFactory);
return baseComponentSupplier.createQuerySegmentWalker(conglomerate, joinableFactory, injector);
}
@Override
@ -546,6 +554,18 @@ public class BaseCalciteQueryTest extends CalciteTestBase
}
}
@Override
public void gatherProperties(Properties properties)
{
baseComponentSupplier.gatherProperties(properties);
}
@Override
public void configureGuice(DruidInjectorBuilder builder)
{
baseComponentSupplier.configureGuice(builder);
}
@Override
public QueryRunnerFactoryConglomerate createCongolmerate(Builder builder, Closer closer)
{
@ -558,36 +578,18 @@ public class BaseCalciteQueryTest extends CalciteTestBase
baseComponentSupplier.configureJsonMapper(mapper);
}
@Override
public DruidOperatorTable createOperatorTable()
{
return baseComponentSupplier.createOperatorTable();
}
@Override
public ExprMacroTable createMacroTable()
{
return baseComponentSupplier.createMacroTable();
}
@Override
public Map<String, Object> getJacksonInjectables()
{
return baseComponentSupplier.getJacksonInjectables();
}
@Override
public Iterable<? extends Module> getJacksonModules()
{
return baseComponentSupplier.getJacksonModules();
}
@Override
public JoinableFactoryWrapper createJoinableFactoryWrapper(LookupExtractorFactoryContainerProvider lookupProvider)
{
return baseComponentSupplier.createJoinableFactoryWrapper(lookupProvider);
}
@Override
public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
{
baseComponentSupplier.finalizeTestFramework(sqlTestFramework);
}
@Override
public Set<ExtensionCalciteRuleProvider> extensionCalciteRules()
{
@ -618,11 +620,6 @@ public class BaseCalciteQueryTest extends CalciteTestBase
basePlannerComponentSupplier.finalizePlanner(plannerFixture);
}
@Override
public void configureGuice(DruidInjectorBuilder builder)
{
}
public void assertQueryIsUnplannable(final String sql, String expectedError)
{
assertQueryIsUnplannable(PLANNER_CONFIG_DEFAULT, sql, expectedError);
@ -839,49 +836,6 @@ public class BaseCalciteQueryTest extends CalciteTestBase
public class CalciteTestConfig implements QueryTestBuilder.QueryTestConfig
{
@Override
public QueryTestRunner analyze(QueryTestBuilder builder)
{
if (builder.expectedResultsVerifier == null && builder.expectedResults != null) {
builder.expectedResultsVerifier = defaultResultsVerifier(
builder.expectedResults,
builder.expectedResultSignature
);
}
final List<QueryTestRunner.QueryRunStep> runSteps = new ArrayList<>();
final List<QueryTestRunner.QueryVerifyStep> verifySteps = new ArrayList<>();
// Historically, a test either prepares the query (to check resources), or
// runs the query (to check the native query and results). In the future we
// may want to do both in a single test, but we have no such tests today.
if (builder.expectedResources != null) {
Preconditions.checkArgument(
builder.expectedResultsVerifier == null,
"Cannot check both results and resources"
);
QueryTestRunner.PrepareQuery execStep = new QueryTestRunner.PrepareQuery(builder);
runSteps.add(execStep);
verifySteps.add(new QueryTestRunner.VerifyResources(execStep));
} else {
QueryTestRunner.ExecuteQuery execStep = new QueryTestRunner.ExecuteQuery(builder);
runSteps.add(execStep);
// Verify native queries before results. (Note: change from prior pattern
// that reversed the steps.)
if (builder.expectedQueries != null) {
verifySteps.add(new QueryTestRunner.VerifyNativeQueries(execStep));
}
if (builder.expectedResultsVerifier != null) {
verifySteps.add(new QueryTestRunner.VerifyResults(execStep));
}
// The exception is always verified: either there should be no exception
// (the other steps ran), or there should be the defined exception.
verifySteps.add(new QueryTestRunner.VerifyExpectedException(execStep));
}
return new QueryTestRunner(runSteps, verifySteps);
}
@Override
public QueryLogHook queryLogHook()
{
@ -897,7 +851,7 @@ public class BaseCalciteQueryTest extends CalciteTestBase
@Override
public PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig)
{
return queryFramework.plannerFixture(BaseCalciteQueryTest.this, plannerConfig, authConfig);
return queryFramework().plannerFixture(BaseCalciteQueryTest.this, plannerConfig, authConfig);
}
@Override
@ -905,6 +859,18 @@ public class BaseCalciteQueryTest extends CalciteTestBase
{
return queryFramework().queryJsonMapper();
}
@Override
public ResultsVerifier defaultResultsVerifier(
List<Object[]> expectedResults,
RowSignature expectedResultSignature
)
{
return BaseCalciteQueryTest.this.defaultResultsVerifier(
expectedResults,
expectedResultSignature
);
}
}
public Set<ResourceAction> analyzeResources(

View File

@ -20,15 +20,21 @@
package org.apache.druid.sql.calcite;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Binder;
import org.apache.druid.data.input.impl.CsvInputFormat;
import org.apache.druid.data.input.impl.InlineInputSource;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.metadata.input.InputSourceModule;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
@ -41,10 +47,12 @@ import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.server.security.ResourceType;
import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.calcite.external.ExternalDataSource;
import org.apache.druid.sql.calcite.external.ExternalOperatorConversion;
import org.apache.druid.sql.calcite.parser.DruidSqlInsert;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.guice.SqlBindings;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matcher;
import org.hamcrest.MatcherAssert;
@ -93,6 +101,55 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
super(IngestionTestSqlEngine.INSTANCE);
}
@Override
public void configureGuice(DruidInjectorBuilder builder)
{
super.configureGuice(builder);
builder.addModule(new DruidModule() {
// Clone of MSQExternalDataSourceModule since it is not
// visible here.
@Override
public List<? extends Module> getJacksonModules()
{
return Collections.singletonList(
new SimpleModule(getClass().getSimpleName())
.registerSubtypes(ExternalDataSource.class)
);
}
@Override
public void configure(Binder binder)
{
// Nothing to do.
}
});
builder.addModule(new DruidModule() {
// Partial clone of MsqSqlModule, since that module is not
// visible to this one.
@Override
public List<? extends Module> getJacksonModules()
{
// We want this module to bring input sources along for the ride.
return new InputSourceModule().getJacksonModules();
}
@Override
public void configure(Binder binder)
{
// We want this module to bring InputSourceModule along for the ride.
binder.install(new InputSourceModule());
// Set up the EXTERN macro.
SqlBindings.addOperatorConversion(binder, ExternalOperatorConversion.class);
}
});
}
@After
public void tearDown()
{
@ -147,6 +204,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
private List<ResourceAction> expectedResources;
private Query<?> expectedQuery;
private Matcher<Throwable> validationErrorMatcher;
private String expectedLogicalPlanResource;
private IngestionDmlTester()
{
@ -224,6 +282,12 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
);
}
public IngestionDmlTester expectLogicalPlanFrom(String resource)
{
this.expectedLogicalPlanResource = resource;
return this;
}
public void verify()
{
if (didTest) {
@ -299,6 +363,15 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
.expectedResources(expectedResources)
.run();
String expectedLogicalPlan;
if (expectedLogicalPlanResource != null) {
expectedLogicalPlan = StringUtils.getResource(
this,
"/calcite/expected/ingest/" + expectedLogicalPlanResource + "-logicalPlan.txt"
);
} else {
expectedLogicalPlan = null;
}
testBuilder()
.sql(sql)
.queryContext(queryContext)
@ -306,6 +379,7 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
.plannerConfig(plannerConfig)
.expectedQuery(expectedQuery)
.expectedResults(Collections.singletonList(new Object[]{expectedTargetDataSource, expectedTargetSignature}))
.expectedLogicalPlan(expectedLogicalPlan)
.run();
}

View File

@ -60,7 +60,7 @@ import java.util.Map;
public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
{
private static final Map<String, Object> PARTITIONED_BY_ALL_TIME_QUERY_CONTEXT = ImmutableMap.of(
public static final Map<String, Object> PARTITIONED_BY_ALL_TIME_QUERY_CONTEXT = ImmutableMap.of(
DruidSqlInsert.SQL_INSERT_SEGMENT_GRANULARITY,
"{\"type\":\"all\"}"
);
@ -302,6 +302,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
.context(PARTITIONED_BY_ALL_TIME_QUERY_CONTEXT)
.build()
)
.expectLogicalPlanFrom("insertFromExternal")
.verify();
}
@ -329,6 +330,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
.context(queryContextWithGranularity(Granularities.HOUR))
.build()
)
.expectLogicalPlanFrom("insertWithPartitionedBy")
.verify();
}
@ -423,6 +425,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
.context(queryContextWithGranularity(Granularities.DAY))
.build()
)
.expectLogicalPlanFrom("insertWithClusteredBy")
.verify();
}

View File

@ -19,11 +19,10 @@
package org.apache.druid.sql.calcite;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.inject.Injector;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.DimensionSchema;
@ -34,7 +33,7 @@ import org.apache.druid.data.input.impl.MapInputRowParser;
import org.apache.druid.data.input.impl.StringDimensionSchema;
import org.apache.druid.data.input.impl.TimeAndDimsParseSpec;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.guice.ExpressionModule;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.guice.NestedDataModule;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.math.expr.ExprMacroTable;
@ -45,7 +44,6 @@ import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.expression.LookupExprMacro;
import org.apache.druid.query.filter.InDimFilter;
import org.apache.druid.query.filter.LikeDimFilter;
import org.apache.druid.query.groupby.GroupByQuery;
@ -61,13 +59,11 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.segment.nested.NestedDataComplexTypeSerde;
import org.apache.druid.segment.serde.ComplexMetrics;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.segment.virtual.NestedFieldVirtualColumn;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.sql.calcite.filtration.Filtration;
import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.calcite.util.TestDataBuilder;
import org.apache.druid.timeline.DataSegment;
@ -75,7 +71,6 @@ import org.apache.druid.timeline.partition.LinearShardSpec;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@ -150,19 +145,18 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
RAW_ROWS.stream().map(raw -> TestDataBuilder.createRow(raw, PARSER)).collect(Collectors.toList());
@Override
public Iterable<? extends Module> getJacksonModules()
public void configureGuice(DruidInjectorBuilder builder)
{
return Iterables.concat(
super.getJacksonModules(),
NestedDataModule.getJacksonModulesList()
);
super.configureGuice(builder);
builder.addModule(new NestedDataModule());
}
@SuppressWarnings("resource")
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
) throws IOException
{
NestedDataModule.registerHandlersAndSerde();
@ -194,18 +188,6 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
);
}
@Override
public ExprMacroTable createMacroTable()
{
ComplexMetrics.registerSerde(NestedDataComplexTypeSerde.TYPE_NAME, NestedDataComplexTypeSerde.INSTANCE);
final List<ExprMacroTable.ExprMacro> exprMacros = new ArrayList<>();
for (Class<? extends ExprMacroTable.ExprMacro> clazz : ExpressionModule.EXPR_MACROS) {
exprMacros.add(CalciteTests.INJECTOR.getInstance(clazz));
}
exprMacros.add(CalciteTests.INJECTOR.getInstance(LookupExprMacro.class));
return new ExprMacroTable(exprMacros);
}
@Test
public void testGroupByPath()
{
@ -2428,7 +2410,7 @@ public class CalciteNestedDataQueryTest extends BaseCalciteQueryTest
"v0",
"json_value(json_object('x',\"v1\"),'$.x', 'LONG')",
ColumnType.LONG,
createMacroTable()
queryFramework().macroTable()
),
new NestedFieldVirtualColumn(
"nest",

View File

@ -25,6 +25,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthenticationResult;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.calcite.BaseCalciteQueryTest.ResultsVerifier;
import org.apache.druid.sql.calcite.QueryTestRunner.QueryResults;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
@ -61,17 +62,17 @@ public class QueryTestBuilder
{
/**
* Implement to provide the execution framework that the tests require.
* The {@link #analyze(QueryTestBuilder)} method builds up the classes that
* The constructor builds up the classes that
* will run the test, since some verification depends on context, such as that
* provided by {@link BaseCalciteQueryTest}.
*/
public interface QueryTestConfig
{
QueryTestRunner analyze(QueryTestBuilder builder);
QueryLogHook queryLogHook();
ExpectedException expectedException();
ObjectMapper jsonMapper();
PlannerFixture plannerFixture(PlannerConfig plannerConfig, AuthConfig authConfig);
ResultsVerifier defaultResultsVerifier(List<Object[]> expectedResults, RowSignature expectedResultSignature);
}
protected final QueryTestConfig config;
@ -90,6 +91,8 @@ public class QueryTestBuilder
protected boolean queryCannotVectorize;
protected AuthConfig authConfig = new AuthConfig();
protected PlannerFixture plannerFixture;
protected String expectedLogicalPlan;
protected SqlSchema expectedSqlSchema;
public QueryTestBuilder(final QueryTestConfig config)
{
@ -215,23 +218,35 @@ public class QueryTestBuilder
return this;
}
public QueryTestBuilder expectedLogicalPlan(String expectedLogicalPlan)
{
this.expectedLogicalPlan = expectedLogicalPlan;
return this;
}
public QueryTestBuilder expectedSqlSchema(SqlSchema querySchema)
{
this.expectedSqlSchema = querySchema;
return this;
}
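A hedged sketch of a test that exercises both new options (the query, schema, and plan text below are illustrative only):
testBuilder()
    .sql("SELECT dim1 FROM foo")
    .expectedSqlSchema(
        SqlSchema.builder()
            .column("dim1", "VARCHAR")
            .build()
    )
    .expectedLogicalPlan(
        "LogicalProject(dim1=[$1])\n" +
        "  LogicalTableScan(table=[[druid, foo]])\n"
    )
    .run();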
public QueryTestRunner build()
{
return config.analyze(this);
return new QueryTestRunner(this);
}
/**
* Internal method to return the cached planner config, or create a new one
* Internal method to return the cached statement factory, or create a new one
* based on the configs provided. Note: does not cache the newly created
* factory: doing so would confuse the "please use mine" vs. "create a new
* one each time" semantics.
*/
protected PlannerFixture plannerFixture()
protected SqlStatementFactory statementFactory()
{
if (plannerFixture != null) {
return plannerFixture;
return plannerFixture.statementFactory();
} else {
return config.plannerFixture(plannerConfig, authConfig);
return config.plannerFixture(plannerConfig, authConfig).statementFactory();
}
}

View File

@ -21,8 +21,13 @@ package org.apache.druid.sql.calcite;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlExplainFormat;
import org.apache.calcite.sql.SqlExplainLevel;
import org.apache.calcite.sql.SqlInsert;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.StringUtils;
@ -35,6 +40,10 @@ import org.apache.druid.sql.DirectStatement;
import org.apache.druid.sql.PreparedStatement;
import org.apache.druid.sql.SqlQueryPlus;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.calcite.QueryTestBuilder.QueryTestConfig;
import org.apache.druid.sql.calcite.parser.DruidSqlIngest;
import org.apache.druid.sql.calcite.planner.PlannerCaptureHook;
import org.apache.druid.sql.calcite.planner.PrepareResult;
import org.apache.druid.sql.calcite.table.RowSignatures;
import org.apache.druid.sql.calcite.util.QueryLogHook;
import org.junit.Assert;
@ -48,7 +57,7 @@ import java.util.Map;
import java.util.Set;
/**
* Runs a test build up by {@link QueryTestBuilder}. Running a SQL query test
* Runs a test built up by {@link QueryTestBuilder}. Running a SQL query test
* is somewhat complex, with different modes and items to verify. To manage the
* complexity, test execution is done in two steps:
* <ol>
@ -95,27 +104,32 @@ public class QueryTestRunner
{
public final Map<String, Object> queryContext;
public final String vectorizeOption;
public final RelDataType sqlSignature;
public final RowSignature signature;
public final List<Object[]> results;
public final List<Query<?>> recordedQueries;
public final Set<ResourceAction> resourceActions;
public final RuntimeException exception;
public final PlannerCaptureHook capture;
public QueryResults(
final Map<String, Object> queryContext,
final String vectorizeOption,
final RowSignature signature,
final RelDataType sqlSignature,
final List<Object[]> results,
final List<Query<?>> recordedQueries
final List<Query<?>> recordedQueries,
final PlannerCaptureHook capture
)
{
this.queryContext = queryContext;
this.vectorizeOption = vectorizeOption;
this.signature = signature;
this.sqlSignature = sqlSignature;
this.signature = RowSignatures.fromRelDataType(sqlSignature.getFieldNames(), sqlSignature);
this.results = results;
this.recordedQueries = recordedQueries;
this.resourceActions = null;
this.exception = null;
this.capture = capture;
}
public QueryResults(
@ -131,21 +145,8 @@ public class QueryTestRunner
this.recordedQueries = null;
this.resourceActions = null;
this.exception = exception;
}
public QueryResults(
final Map<String, Object> queryContext,
final String vectorizeOption,
final Set<ResourceAction> resourceActions
)
{
this.queryContext = queryContext;
this.vectorizeOption = vectorizeOption;
this.signature = null;
this.results = null;
this.recordedQueries = null;
this.resourceActions = resourceActions;
this.exception = null;
this.capture = null;
this.sqlSignature = null;
}
}
@ -156,7 +157,8 @@ public class QueryTestRunner
*/
public static class PrepareQuery extends QueryRunStep
{
private Set<ResourceAction> resourceActions;
public Set<ResourceAction> resourceActions;
public RelDataType sqlSignature;
public PrepareQuery(QueryTestBuilder builder)
{
@ -177,10 +179,11 @@ public class QueryTestRunner
.sqlParameters(builder.parameters)
.auth(builder.authenticationResult)
.build();
final SqlStatementFactory sqlStatementFactory = builder.plannerFixture().statementFactory();
final SqlStatementFactory sqlStatementFactory = builder.statementFactory();
final PreparedStatement stmt = sqlStatementFactory.preparedStatement(sqlQuery);
stmt.prepare();
final PrepareResult prepareResult = stmt.prepare();
resourceActions = stmt.allResources();
sqlSignature = prepareResult.getReturnedRowType();
}
}
@ -191,10 +194,12 @@ public class QueryTestRunner
public static class ExecuteQuery extends QueryRunStep
{
private final List<QueryResults> results = new ArrayList<>();
private final boolean doCapture;
public ExecuteQuery(QueryTestBuilder builder)
{
super(builder);
doCapture = builder.expectedLogicalPlan != null;
}
public List<QueryResults> results()
@ -209,7 +214,7 @@ public class QueryTestRunner
BaseCalciteQueryTest.log.info("SQL: %s", builder.sql);
final SqlStatementFactory sqlStatementFactory = builder.plannerFixture().statementFactory();
final SqlStatementFactory sqlStatementFactory = builder.statementFactory();
final SqlQueryPlus sqlQuery = SqlQueryPlus.builder(builder.sql)
.sqlParameters(builder.parameters)
.auth(builder.authenticationResult)
@ -233,25 +238,40 @@ public class QueryTestRunner
theQueryContext.put(QueryContexts.VECTOR_SIZE_KEY, 2); // Small vector size to ensure we use more than one.
}
try {
final Pair<RowSignature, List<Object[]>> plannerResults = getResults(
sqlStatementFactory,
sqlQuery.withContext(theQueryContext));
results.add(new QueryResults(
theQueryContext,
vectorize,
plannerResults.lhs,
plannerResults.rhs,
queryLogHook.getRecordedQueries()
));
}
catch (RuntimeException e) {
results.add(new QueryResults(
theQueryContext,
vectorize,
e
));
}
results.add(runQuery(
sqlStatementFactory,
sqlQuery.withContext(theQueryContext),
vectorize
));
}
}
public QueryResults runQuery(
final SqlStatementFactory sqlStatementFactory,
final SqlQueryPlus query,
final String vectorize
)
{
try {
final PlannerCaptureHook capture = doCapture ? new PlannerCaptureHook() : null;
final DirectStatement stmt = sqlStatementFactory.directStatement(query);
stmt.setHook(capture);
final Sequence<Object[]> results = stmt.execute().getResults();
return new QueryResults(
query.context(),
vectorize,
stmt.prepareResult().getReturnedRowType(),
results.toList(),
builder().config.queryLogHook().getRecordedQueries(),
capture
);
}
catch (RuntimeException e) {
return new QueryResults(
query.context(),
vectorize,
e
);
}
}
@ -379,7 +399,7 @@ public class QueryTestRunner
}
/**
* Verify rsources for a prepared query against the expected list.
* Verify resources for a prepared query against the expected list.
*/
public static class VerifyResources implements QueryVerifyStep
{
@ -402,7 +422,102 @@ public class QueryTestRunner
}
/**
* Verify the exception thrown by a query using a jUnit expected
* Verify the SQL signature of a prepared query against the expected schema.
*/
public static class VerifyPrepareSignature implements QueryVerifyStep
{
private final PrepareQuery prepareStep;
public VerifyPrepareSignature(PrepareQuery prepareStep)
{
this.prepareStep = prepareStep;
}
@Override
public void verify()
{
QueryTestBuilder builder = prepareStep.builder();
Assert.assertEquals(
builder.expectedSqlSchema,
SqlSchema.of(prepareStep.sqlSignature)
);
}
}
/**
* Verify the SQL signature of an executed query against the expected schema.
*/
public static class VerifyExecuteSignature extends VerifyExecStep
{
public VerifyExecuteSignature(ExecuteQuery execStep)
{
super(execStep);
}
@Override
public void verify()
{
QueryTestBuilder builder = execStep.builder();
for (QueryResults queryResults : execStep.results()) {
Assert.assertEquals(
builder.expectedSqlSchema,
SqlSchema.of(queryResults.sqlSignature)
);
}
}
}
public static class VerifyLogicalPlan extends VerifyExecStep
{
public VerifyLogicalPlan(ExecuteQuery execStep)
{
super(execStep);
}
@Override
public void verify()
{
for (QueryResults queryResults : execStep.results()) {
verifyLogicalPlan(queryResults);
}
}
private void verifyLogicalPlan(QueryResults queryResults)
{
String expectedPlan = execStep.builder().expectedLogicalPlan;
String actualPlan = visualizePlan(queryResults.capture);
Assert.assertEquals(expectedPlan, actualPlan);
}
private String visualizePlan(PlannerCaptureHook hook)
{
// Do-it-ourselves plan dump, since the actual plan omits the insert node.
String queryPlan = RelOptUtil.dumpPlan(
"",
hook.relRoot().rel,
SqlExplainFormat.TEXT,
SqlExplainLevel.DIGEST_ATTRIBUTES);
String plan;
SqlInsert insertNode = hook.insertNode();
if (insertNode == null) {
plan = queryPlan;
} else {
DruidSqlIngest druidInsertNode = (DruidSqlIngest) insertNode;
// The target is a SqlIdentifier literal, pre-resolution, so it does
// not include the schema.
plan = StringUtils.format(
"LogicalInsert(target=[%s], partitionedBy=[%s], clusteredBy=[%s])\n",
druidInsertNode.getTargetTable(),
druidInsertNode.getPartitionedBy() == null ? "<none>" : druidInsertNode.getPartitionedBy(),
druidInsertNode.getClusteredBy() == null ? "<none>" : druidInsertNode.getClusteredBy()
) + " " + StringUtils.replace(queryPlan, "\n ", "\n ");
}
return plan;
}
}
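The assembled plan text for an ingest query then looks roughly like the following (illustrative only; the digest attributes depend on the query):
LogicalInsert(target=[dst], partitionedBy=[HOUR], clusteredBy=[<none>])
  LogicalProject(x=[$0], y=[$1])
    LogicalTableScan(table=[[external]])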
/**
* Verify the exception thrown by a query using a JUnit expected
* exception. This is actually an awkward way to do the job, but it is
* what the Calcite queries have long used. There are three modes.
* In the first, the exception is simply thrown and the expected
@ -455,16 +570,62 @@ public class QueryTestRunner
}
}
private final List<QueryTestRunner.QueryRunStep> runSteps;
private final List<QueryTestRunner.QueryVerifyStep> verifySteps;
private final List<QueryTestRunner.QueryRunStep> runSteps = new ArrayList<>();
private final List<QueryTestRunner.QueryVerifyStep> verifySteps = new ArrayList<>();
QueryTestRunner(
final List<QueryTestRunner.QueryRunStep> runSteps,
final List<QueryTestRunner.QueryVerifyStep> verifySteps
)
/**
* Create a test runner based on the options set in the builder.
*/
public QueryTestRunner(QueryTestBuilder builder)
{
this.runSteps = runSteps;
this.verifySteps = verifySteps;
QueryTestConfig config = builder.config;
if (builder.expectedResultsVerifier == null && builder.expectedResults != null) {
builder.expectedResultsVerifier = config.defaultResultsVerifier(
builder.expectedResults,
builder.expectedResultSignature
);
}
// Historically, a test either prepares the query (to check resources), or
// runs the query (to check the native query and results). In the future we
// may want to do both in a single test, but we have no such tests today.
if (builder.expectedResources != null) {
Preconditions.checkArgument(
builder.expectedResultsVerifier == null,
"Cannot check both results and resources"
);
QueryTestRunner.PrepareQuery execStep = new QueryTestRunner.PrepareQuery(builder);
runSteps.add(execStep);
verifySteps.add(new QueryTestRunner.VerifyResources(execStep));
if (builder.expectedSqlSchema != null) {
verifySteps.add(new VerifyPrepareSignature(execStep));
}
} else {
QueryTestRunner.ExecuteQuery execStep = new QueryTestRunner.ExecuteQuery(builder);
runSteps.add(execStep);
// Verify the logical plan, if requested.
if (builder.expectedLogicalPlan != null) {
verifySteps.add(new QueryTestRunner.VerifyLogicalPlan(execStep));
}
if (builder.expectedSqlSchema != null) {
verifySteps.add(new VerifyExecuteSignature(execStep));
}
// Verify native queries before results. (Note: change from prior pattern
// that reversed the steps.
if (builder.expectedQueries != null) {
verifySteps.add(new QueryTestRunner.VerifyNativeQueries(execStep));
}
if (builder.expectedResultsVerifier != null) {
verifySteps.add(new QueryTestRunner.VerifyResults(execStep));
}
// The exception is always verified: either there should be no exception
// (the other steps ran), or there should be the defined exception.
verifySteps.add(new QueryTestRunner.VerifyExpectedException(execStep));
}
}
/**

View File

@ -0,0 +1,145 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.druid.java.util.common.StringUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
/**
* Simple representation of an SQL schema used to validate the row type of
* a SQL query against the SQL types. See {@code RowSignature} when the goal
* is to validate the Druid native signature.
*/
public class SqlSchema
{
public static class ColumnSignature
{
private final String name;
private final String type;
public ColumnSignature(final String name, final String type)
{
this.name = name;
this.type = type;
}
public String name()
{
return name;
}
public String type()
{
return type;
}
@Override
public String toString()
{
return StringUtils.format("%s %s", name, type);
}
@Override
public boolean equals(Object o)
{
if (o == null || o.getClass() != getClass()) {
return false;
}
ColumnSignature other = (ColumnSignature) o;
return Objects.equals(name, other.name)
&& Objects.equals(type, other.type);
}
@Override
public int hashCode()
{
return Objects.hash(name, type);
}
}
public static class Builder
{
private final List<ColumnSignature> columns = new ArrayList<>();
public Builder column(String name, String type)
{
columns.add(new ColumnSignature(name, type));
return this;
}
public SqlSchema build()
{
return new SqlSchema(columns);
}
}
private final List<ColumnSignature> columns;
public SqlSchema(final List<ColumnSignature> columns)
{
this.columns = columns;
}
public static Builder builder()
{
return new Builder();
}
public static SqlSchema of(RelDataType rowType)
{
final Builder builder = new Builder();
List<RelDataTypeField> fields = rowType.getFieldList();
for (RelDataTypeField field : fields) {
builder.column(field.getName(), field.getType().getFullTypeString());
}
return builder.build();
}
@Override
public String toString()
{
return "(" +
columns.stream().map(ColumnSignature::toString).collect(Collectors.joining(", ")) +
")";
}
@Override
public boolean equals(Object o)
{
if (o == null || o.getClass() != getClass()) {
return false;
}
final SqlSchema other = (SqlSchema) o;
return Objects.equals(columns, other.columns);
}
@Override
public int hashCode()
{
return Objects.hash(columns);
}
}
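A hedged usage sketch (the column names are illustrative, and the type strings must match Calcite's full type strings exactly; rowType stands in for a RelDataType obtained from prepare or execute):
SqlSchema expected = SqlSchema.builder()
    .column("__time", "TIMESTAMP(3) NOT NULL")
    .column("dim1", "VARCHAR")
    .build();
Assert.assertEquals(expected, SqlSchema.of(rowType));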

View File

@ -0,0 +1,65 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.util;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Binder;
import org.apache.druid.guice.ExpressionModule;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.query.expression.LookupEnabledTestExprMacroTable;
import org.apache.druid.query.expression.LookupExprMacro;
import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider;
import org.apache.druid.query.lookup.LookupSerdeModule;
import org.apache.druid.sql.calcite.expression.builtin.QueryLookupOperatorConversion;
import org.apache.druid.sql.guice.SqlBindings;
import org.apache.druid.timeline.DataSegment;
import java.util.List;
class BasicTestModule implements DruidModule
{
@Override
public void configure(Binder binder)
{
final LookupExtractorFactoryContainerProvider lookupProvider =
LookupEnabledTestExprMacroTable.createTestLookupProvider(
ImmutableMap.of(
"a", "xa",
"abc", "xabc",
"nosuchkey", "mysteryvalue",
"6", "x6"
)
);
binder.bind(DataSegment.PruneSpecsHolder.class).toInstance(DataSegment.PruneSpecsHolder.DEFAULT);
// This Module is just to get a LookupExtractorFactoryContainerProvider with a usable "lookyloo" lookup.
binder.bind(LookupExtractorFactoryContainerProvider.class).toInstance(lookupProvider);
SqlBindings.addOperatorConversion(binder, QueryLookupOperatorConversion.class);
ExpressionModule.addExprMacro(binder, LookupExprMacro.class);
}
@Override
public List<? extends Module> getJacksonModules()
{
return new LookupSerdeModule().getJacksonModules();
}
}
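For orientation, a minimal sketch of standing up an injector that can resolve the "lookyloo" lookup bound above; the builder chain mirrors the SqlTestFramework constructor shown later, and the variable names are illustrative.

// Sketch only: build a test injector with this module and pull out the
// lookup provider it binds.
Injector injector = new CoreInjectorBuilder(new StartupInjectorBuilder().build())
    .ignoreLoadScopes()
    .addModule(new BasicTestModule())
    .build();
LookupExtractorFactoryContainerProvider lookups =
    injector.getInstance(LookupExtractorFactoryContainerProvider.class);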

View File

@ -19,25 +19,12 @@
package org.apache.druid.sql.calcite.util;
import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Binder;
import com.google.inject.Injector;
import org.apache.druid.guice.StartupInjectorBuilder;
import org.apache.druid.initialization.CoreInjectorBuilder;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.expression.LookupEnabledTestExprMacroTable;
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider;
import org.apache.druid.query.lookup.LookupSerdeModule;
import org.apache.druid.sql.calcite.aggregation.SqlAggregationModule;
import org.apache.druid.sql.calcite.expression.builtin.QueryLookupOperatorConversion;
import org.apache.druid.sql.calcite.external.ExternalOperatorConversion;
import org.apache.druid.sql.guice.SqlBindings;
import org.apache.druid.timeline.DataSegment;
import java.util.List;
/**
* Create the injector used for {@link CalciteTests#INJECTOR}, but in a way
@ -77,37 +64,4 @@ public class CalciteTestInjectorBuilder extends CoreInjectorBuilder
throw e;
}
}
private static class BasicTestModule implements DruidModule
{
@Override
public void configure(Binder binder)
{
final LookupExtractorFactoryContainerProvider lookupProvider =
LookupEnabledTestExprMacroTable.createTestLookupProvider(
ImmutableMap.of(
"a", "xa",
"abc", "xabc",
"nosuchkey", "mysteryvalue",
"6", "x6"
)
);
binder.bind(DataSegment.PruneSpecsHolder.class).toInstance(DataSegment.PruneSpecsHolder.DEFAULT);
// This Module is just to get a LookupExtractorFactoryContainerProvider with a usable "lookyloo" lookup.
binder.bind(LookupExtractorFactoryContainerProvider.class).toInstance(lookupProvider);
SqlBindings.addOperatorConversion(binder, QueryLookupOperatorConversion.class);
// Add "EXTERN" table macro, for CalciteInsertDmlTest.
SqlBindings.addOperatorConversion(binder, ExternalOperatorConversion.class);
}
@Override
public List<? extends Module> getJacksonModules()
{
return new LookupSerdeModule().getJacksonModules();
}
}
}

View File

@ -127,6 +127,7 @@ public class QueryFrameworkUtils
);
return new SqlStatementFactory(toolbox);
}
public static DruidSchemaCatalog createMockRootSchema(
final Injector injector,
final QueryRunnerFactoryConglomerate conglomerate,

View File

@ -32,6 +32,7 @@ import org.junit.runners.model.Statement;
import java.util.List;
import java.util.function.Consumer;
import java.util.function.Supplier;
/**
* JUnit Rule that adds a Calcite hook to log and remember Druid queries.
@ -40,22 +41,22 @@ public class QueryLogHook implements TestRule
{
private static final Logger log = new Logger(QueryLogHook.class);
private final ObjectMapper objectMapper;
private final Supplier<ObjectMapper> objectMapperSupplier;
private final List<Query<?>> recordedQueries = Lists.newCopyOnWriteArrayList();
public QueryLogHook(final ObjectMapper objectMapper)
public QueryLogHook(final Supplier<ObjectMapper> objectMapperSupplier)
{
this.objectMapper = objectMapper;
this.objectMapperSupplier = objectMapperSupplier;
}
public static QueryLogHook create()
{
return new QueryLogHook(new DefaultObjectMapper());
return new QueryLogHook(() -> DefaultObjectMapper.INSTANCE);
}
public static QueryLogHook create(final ObjectMapper objectMapper)
{
return new QueryLogHook(objectMapper);
return new QueryLogHook(() -> objectMapper);
}
public void clearRecordedQueries()
@ -83,7 +84,7 @@ public class QueryLogHook implements TestRule
recordedQueries.add((Query<?>) query);
log.info(
"Issued query: %s",
objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(query)
objectMapperSupplier.get().writerWithDefaultPrettyPrinter().writeValueAsString(query)
);
}
catch (Exception e) {
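Replacing the captured ObjectMapper with a Supplier&lt;ObjectMapper&gt; defers the mapper lookup until a query is actually logged, so the hook no longer races with test-framework construction by grabbing a mapper too early. A sketch, assuming an already-built SqlTestFramework named framework:

// Sketch only: the supplier resolves the mapper lazily, at log time.
QueryLogHook hook = new QueryLogHook(framework::queryJsonMapper);
QueryLogHook defaultHook = QueryLogHook.create(); // uses () -> DefaultObjectMapper.INSTANCE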

View File

@ -19,16 +19,17 @@
package org.apache.druid.sql.calcite.util;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Provides;
import org.apache.druid.guice.DruidInjectorBuilder;
import org.apache.druid.guice.ExpressionModule;
import org.apache.druid.guice.LazySingleton;
import org.apache.druid.guice.StartupInjectorBuilder;
import org.apache.druid.initialization.CoreInjectorBuilder;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.io.Closer;
@ -36,7 +37,6 @@ import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.GlobalTableDataSource;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.lookup.LookupExtractorFactoryContainerProvider;
import org.apache.druid.query.lookup.LookupSerdeModule;
import org.apache.druid.query.topn.TopNQueryConfig;
import org.apache.druid.segment.join.JoinableFactoryWrapper;
import org.apache.druid.server.QueryLifecycle;
@ -45,7 +45,7 @@ import org.apache.druid.server.QueryStackTests;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.sql.SqlStatementFactory;
import org.apache.druid.sql.calcite.external.ExternalDataSource;
import org.apache.druid.sql.calcite.aggregation.SqlAggregationModule;
import org.apache.druid.sql.calcite.planner.CalciteRulesManager;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
@ -61,12 +61,11 @@ import org.apache.druid.sql.calcite.view.InProcessViewManager;
import org.apache.druid.sql.calcite.view.ViewManager;
import org.apache.druid.timeline.DataSegment;
import javax.inject.Singleton;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
@ -123,6 +122,20 @@ public class SqlTestFramework
*/
public interface QueryComponentSupplier
{
/**
 * Gather properties used to configure the test. Particularly useful when
 * choosing among aggregator implementations: setting a property here avoids
 * copy/pasting selection code into each test.
*/
void gatherProperties(Properties properties);
/**
 * Configure the Guice modules needed for tests. This is also the preferred
 * way to configure Jackson: register here the production module that carries
 * the required Jackson configuration.
*/
void configureGuice(DruidInjectorBuilder builder);
QueryRunnerFactoryConglomerate createCongolmerate(
Builder builder,
Closer closer
@ -130,7 +143,8 @@ public class SqlTestFramework
SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
QueryRunnerFactoryConglomerate conglomerate,
JoinableFactoryWrapper joinableFactory
JoinableFactoryWrapper joinableFactory,
Injector injector
) throws IOException;
SqlEngine createEngine(
@ -138,19 +152,16 @@ public class SqlTestFramework
ObjectMapper objectMapper
);
DruidOperatorTable createOperatorTable();
ExprMacroTable createMacroTable();
Iterable<? extends Module> getJacksonModules();
Map<String, Object> getJacksonInjectables();
/**
* Configure the JSON mapper.
*
 * @see #configureGuice(DruidInjectorBuilder) for the preferred solution.
*/
void configureJsonMapper(ObjectMapper mapper);
void configureGuice(DruidInjectorBuilder builder);
JoinableFactoryWrapper createJoinableFactoryWrapper(LookupExtractorFactoryContainerProvider lookupProvider);
void finalizeTestFramework(SqlTestFramework sqlTestFramework);
}
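To make the two new hooks concrete, a hypothetical extension test supplier might extend StandardComponentSupplier (defined below) roughly as follows. The property key and module name are stand-ins, not real Druid identifiers.

// Hypothetical supplier: selects an implementation via a property and
// reuses the extension's production Guice module, rather than hand-built
// operator and macro tables.
class MyExtensionComponentSupplier extends StandardComponentSupplier
{
  MyExtensionComponentSupplier(Injector injector, File temporaryFolder)
  {
    super(injector, temporaryFolder);
  }

  @Override
  public void gatherProperties(Properties properties)
  {
    properties.put("druid.myext.implementation", "compressed"); // hypothetical key
  }

  @Override
  public void configureGuice(DruidInjectorBuilder builder)
  {
    builder.addModule(new MyExtensionModule()); // hypothetical production module
  }
}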
public interface PlannerComponentSupplier
@ -174,18 +185,25 @@ public class SqlTestFramework
*/
public static class StandardComponentSupplier implements QueryComponentSupplier
{
private final Injector injector;
private final File temporaryFolder;
public StandardComponentSupplier(
final Injector injector,
final File temporaryFolder
)
{
this.injector = injector;
this.temporaryFolder = temporaryFolder;
}
@Override
public void gatherProperties(Properties properties)
{
}
@Override
public void configureGuice(DruidInjectorBuilder builder)
{
}
@Override
public QueryRunnerFactoryConglomerate createCongolmerate(
Builder builder,
@ -208,7 +226,8 @@ public class SqlTestFramework
@Override
public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker(
final QueryRunnerFactoryConglomerate conglomerate,
final JoinableFactoryWrapper joinableFactory
final JoinableFactoryWrapper joinableFactory,
final Injector injector
)
{
return TestDataBuilder.createMockWalker(
@ -229,42 +248,11 @@ public class SqlTestFramework
);
}
@Override
public DruidOperatorTable createOperatorTable()
{
return QueryFrameworkUtils.createOperatorTable(injector);
}
@Override
public ExprMacroTable createMacroTable()
{
return QueryFrameworkUtils.createExprMacroTable(injector);
}
@Override
public Iterable<? extends Module> getJacksonModules()
{
final List<Module> modules = new ArrayList<>(new LookupSerdeModule().getJacksonModules());
modules.add(new SimpleModule().registerSubtypes(ExternalDataSource.class));
return modules;
}
@Override
public Map<String, Object> getJacksonInjectables()
{
return new HashMap<>();
}
@Override
public void configureJsonMapper(ObjectMapper mapper)
{
}
@Override
public void configureGuice(DruidInjectorBuilder builder)
{
}
@Override
public JoinableFactoryWrapper createJoinableFactoryWrapper(LookupExtractorFactoryContainerProvider lookupProvider)
{
@ -276,6 +264,11 @@ public class SqlTestFramework
)
);
}
@Override
public void finalizeTestFramework(SqlTestFramework sqlTestFramework)
{
}
}
public static class StandardPlannerComponentSupplier implements PlannerComponentSupplier
@ -476,24 +469,17 @@ public class SqlTestFramework
@Override
public void configure(Binder binder)
{
binder.bind(DruidOperatorTable.class).toInstance(componentSupplier.createOperatorTable());
binder.bind(ExprMacroTable.class).toInstance(componentSupplier.createMacroTable());
binder.bind(DruidOperatorTable.class).in(LazySingleton.class);
binder.bind(DataSegment.PruneSpecsHolder.class).toInstance(DataSegment.PruneSpecsHolder.DEFAULT);
}
@Override
public List<? extends Module> getJacksonModules()
{
return Lists.newArrayList(componentSupplier.getJacksonModules());
}
@Provides
@Provides @Singleton
public QueryRunnerFactoryConglomerate conglomerate()
{
return componentSupplier.createCongolmerate(builder, resourceCloser);
}
@Provides
@Provides @Singleton
public JoinableFactoryWrapper joinableFactoryWrapper(final Injector injector)
{
return builder.componentSupplier.createJoinableFactoryWrapper(
@ -501,13 +487,14 @@ public class SqlTestFramework
);
}
@Provides
@Provides @Singleton
public SpecificSegmentsQuerySegmentWalker segmentsQuerySegmentWalker(final Injector injector)
{
try {
SpecificSegmentsQuerySegmentWalker walker = componentSupplier.createQuerySegmentWalker(
injector.getInstance(QueryRunnerFactoryConglomerate.class),
injector.getInstance(JoinableFactoryWrapper.class)
injector.getInstance(JoinableFactoryWrapper.class),
injector
);
resourceCloser.register(walker);
return walker;
@ -517,7 +504,7 @@ public class SqlTestFramework
}
}
@Provides
@Provides @Singleton
public QueryLifecycleFactory queryLifecycleFactory(final Injector injector)
{
return QueryFrameworkUtils.createMockQueryLifecycleFactory(
@ -538,11 +525,30 @@ public class SqlTestFramework
private SqlTestFramework(Builder builder)
{
this.componentSupplier = builder.componentSupplier;
this.injector = new CalciteTestInjectorBuilder()
.addModule(new TestSetupModule(builder))
.build();
Properties properties = new Properties();
this.componentSupplier.gatherProperties(properties);
Injector startupInjector = new StartupInjectorBuilder()
.withProperties(properties)
.build();
DruidInjectorBuilder injectorBuilder = new CoreInjectorBuilder(startupInjector)
// Ignore load scopes. This is a unit test, not a Druid node: if a test
// pulls in a module, include that module even though we are not the
// Druid node type to which the module is scoped.
.ignoreLoadScopes()
.addModule(new BasicTestModule())
.addModule(new SqlAggregationModule())
.addModule(new ExpressionModule())
.addModule(new TestSetupModule(builder));
builder.componentSupplier.configureGuice(injectorBuilder);
this.injector = injectorBuilder.build();
this.engine = builder.componentSupplier.createEngine(queryLifecycleFactory(), queryJsonMapper());
componentSupplier.configureJsonMapper(queryJsonMapper());
componentSupplier.finalizeTestFramework(this);
}
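Since components such as DruidOperatorTable are now bound in Guice (by TestSetupModule above) rather than hand-built per test, a test can resolve them from the framework's injector. A small sketch:

// Sketch only: TestSetupModule binds DruidOperatorTable in LazySingleton
// scope, so repeated lookups return the same instance.
static DruidOperatorTable operatorTable(SqlTestFramework framework)
{
  return framework.injector().getInstance(DruidOperatorTable.class);
}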
public Injector injector()
{
return injector;
}
public ObjectMapper queryJsonMapper()

View File

@ -0,0 +1,3 @@
LogicalInsert(target=[dst], partitionedBy=[AllGranularity], clusteredBy=[<none>])
LogicalProject(x=[$0], y=[$1], z=[$2])
ExternalTableScan(dataSource=[{"type":"external","inputSource":{"type":"http","uris":["http:foo.com/bar.csv"],"httpAuthenticationUsername":"bob","httpAuthenticationPassword":{"type":"default","password":"secret"}},"inputFormat":{"type":"csv","columns":["x","y","z"]},"signature":[{"name":"x","type":"STRING"},{"name":"y","type":"STRING"},{"name":"z","type":"LONG"}]}])

View File

@ -0,0 +1,3 @@
LogicalInsert(target=[dst], partitionedBy=[AllGranularity], clusteredBy=[<none>])
LogicalProject(x=[$0], y=[$1], z=[$2])
ExternalTableScan(dataSource=[{"type":"external","inputSource":{"type":"inline","data":"a,b,1\nc,d,2\n"},"inputFormat":{"type":"csv","columns":["x","y","z"]},"signature":[{"name":"x","type":"STRING"},{"name":"y","type":"STRING"},{"name":"z","type":"LONG"}]}])

View File

@ -0,0 +1,4 @@
LogicalInsert(target=[druid.dst], partitionedBy=[{type=period, period=P1D, timeZone=UTC, origin=null}], clusteredBy=[2, `dim1` DESC, CEIL(`m2`)])
LogicalSort(sort0=[$1], sort1=[$2], sort2=[$3], dir0=[ASC], dir1=[DESC], dir2=[ASC])
LogicalProject(__time=[$0], floor_m1=[FLOOR($5)], dim1=[$1], ceil_m2=[CEIL($6)])
LogicalTableScan(table=[[druid, foo]])

View File

@ -0,0 +1,3 @@
LogicalInsert(target=[druid.dst], partitionedBy=[{type=period, period=PT1H, timeZone=UTC, origin=null}], clusteredBy=[<none>])
LogicalProject(__time=[$0], floor_m1=[FLOOR($5)], dim1=[$1])
LogicalTableScan(table=[[druid, foo]])

View File

@ -0,0 +1,3 @@
LogicalInsert(target=[dst], partitionedBy=[AllGranularity], clusteredBy=[<none>])
LogicalProject(x=[$0], y=[$1], z=[$2])
ExternalTableScan(dataSource=[{"type":"external","inputSource":{"type":"local","files":["/tmp/foo.csv","/tmp/bar.csv"]},"inputFormat":{"type":"csv","columns":["x","y","z"]},"signature":[{"name":"x","type":"STRING"},{"name":"y","type":"STRING"},{"name":"z","type":"LONG"}]}])
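The plan files above are expected-plan fixtures for planner tests. A hedged sketch of how a test might compare against one; the resource path and variable names are assumptions, not taken from this commit:

// Hypothetical check: load the expected plan text from a test resource
// and compare it with the logical plan captured during planning.
String expectedPlan = StringUtils.getResource(this, "/calcite/expected/insertFromExternal-logicalPlan.txt");
Assert.assertEquals(expectedPlan, capturedLogicalPlan);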