allow optimizing sql expressions and virtual columns (#12241)

* rework sql planner expression and virtual column handling

* simplify a bit

* add back and deprecate old methods, more tests, fix multi-value string coercion bug and associated tests

* spotbugs

* fix bugs with multi-value string array expression handling

* javadocs and adjust test

* better

* fix tests
This commit is contained in:
Clint Wylie 2022-02-09 14:55:50 -08:00 committed by GitHub
parent 33bc9226f0
commit 3ee66bb492
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
111 changed files with 2628 additions and 1575 deletions

View File

@ -63,6 +63,11 @@ public interface Expr extends Cacheable
return false;
}
default boolean isIdentifier()
{
return false;
}
/**
* Returns the value of expr if expr is a literal, or throws an exception otherwise.
*

View File

@ -86,6 +86,12 @@ class IdentifierExpr implements Expr
return binding;
}
@Override
public boolean isIdentifier()
{
return true;
}
@Nullable
@Override
public String getIdentifierIfIdentifier()

View File

@ -726,6 +726,7 @@ public class ParserTest extends InitializedNullHandlingTest
Expr parsedFlat = Parser.parse(expr, ExprMacroTable.nil(), true);
Assert.assertTrue(parsed.isLiteral());
Assert.assertTrue(parsedFlat.isLiteral());
Assert.assertFalse(parsed.isIdentifier());
Assert.assertEquals(type, parsed.getOutputType(emptyBinding));
Assert.assertEquals(type, parsedFlat.getOutputType(emptyBinding));
Assert.assertEquals(expected, parsed.getLiteralValue());
@ -770,6 +771,11 @@ public class ParserTest extends InitializedNullHandlingTest
)
{
final Expr parsed = Parser.parse(expression, ExprMacroTable.nil());
if (parsed instanceof IdentifierExpr) {
Assert.assertTrue(parsed.isIdentifier());
} else {
Assert.assertFalse(parsed.isIdentifier());
}
final Expr.BindingAnalysis deets = parsed.analyzeInputs();
Assert.assertEquals(expression, expected, parsed.toString());
Assert.assertEquals(expression, identifiers, deets.getRequiredBindingsList());

View File

@ -22,8 +22,6 @@ package org.apache.druid.query.aggregation.tdigestsketch;
import com.tdunning.math.stats.MergingDigest;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry;
@ -88,8 +86,8 @@ public class TDigestSketchUtils
{
// Check input for equivalence.
final boolean inputMatches;
final VirtualColumn virtualInput =
virtualColumnRegistry.findVirtualColumns(factory.requiredFields())
final DruidExpression virtualInput =
virtualColumnRegistry.findVirtualColumnExpressions(factory.requiredFields())
.stream()
.findFirst()
.orElse(null);
@ -97,7 +95,7 @@ public class TDigestSketchUtils
if (virtualInput == null) {
inputMatches = input.isDirectColumnAccess() && input.getDirectColumn().equals(factory.getFieldName());
} else {
inputMatches = ((ExpressionVirtualColumn) virtualInput).getExpression().equals(input.getExpression());
inputMatches = virtualInput.equals(input);
}
return inputMatches && compression == factory.getCompression();
}

View File

@ -35,7 +35,6 @@ import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchAggregatorFactory;
import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchUtils;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
@ -135,16 +134,11 @@ public class TDigestGenerateSketchSqlAggregator implements SqlAggregator
compression
);
} else {
VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
input,
ColumnType.FLOAT
);
aggregatorFactory = new TDigestSketchAggregatorFactory(
aggName,
virtualColumn.getOutputName(),
compression
);
aggregatorFactory = new TDigestSketchAggregatorFactory(aggName, virtualColumnName, compression);
}
return Aggregation.create(aggregatorFactory);

View File

@ -38,7 +38,6 @@ import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchAggregatorFactory;
import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchToQuantilePostAggregator;
import org.apache.druid.query.aggregation.tdigestsketch.TDigestSketchUtils;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
@ -154,16 +153,11 @@ public class TDigestSketchQuantileSqlAggregator implements SqlAggregator
compression
);
} else {
VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
input,
ColumnType.FLOAT
);
aggregatorFactory = new TDigestSketchAggregatorFactory(
sketchName,
virtualColumn.getOutputName(),
compression
);
aggregatorFactory = new TDigestSketchAggregatorFactory(sketchName, virtualColumnName, compression);
}
return Aggregation.create(

View File

@ -33,7 +33,6 @@ import org.apache.druid.query.aggregation.datasketches.hll.HllSketchBuildAggrega
import org.apache.druid.query.aggregation.datasketches.hll.HllSketchMergeAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
@ -143,12 +142,11 @@ public abstract class HllSketchBaseSqlAggregator implements SqlAggregator
if (columnArg.isDirectColumnAccess()) {
dimensionSpec = columnArg.getSimpleExtraction().toDimensionSpec(null, inputType);
} else {
VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
columnArg,
dataType
);
dimensionSpec = new DefaultDimensionSpec(virtualColumn.getOutputName(), null, inputType);
dimensionSpec = new DefaultDimensionSpec(virtualColumnName, null, inputType);
}
aggregatorFactory = new HllSketchBuildAggregatorFactory(

View File

@ -38,10 +38,8 @@ import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchAggregatorFactory;
import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchToQuantilePostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
import org.apache.druid.sql.calcite.aggregation.Aggregations;
import org.apache.druid.sql.calcite.aggregation.SqlAggregator;
@ -135,8 +133,8 @@ public class DoublesSketchApproxQuantileSqlAggregator implements SqlAggregator
// Check input for equivalence.
final boolean inputMatches;
final VirtualColumn virtualInput =
virtualColumnRegistry.findVirtualColumns(theFactory.requiredFields())
final DruidExpression virtualInput =
virtualColumnRegistry.findVirtualColumnExpressions(theFactory.requiredFields())
.stream()
.findFirst()
.orElse(null);
@ -144,7 +142,7 @@ public class DoublesSketchApproxQuantileSqlAggregator implements SqlAggregator
if (virtualInput == null) {
inputMatches = input.isDirectColumnAccess() && input.getDirectColumn().equals(theFactory.getFieldName());
} else {
inputMatches = ((ExpressionVirtualColumn) virtualInput).getExpression().equals(input.getExpression());
inputMatches = virtualInput.equals(input);
}
final boolean matches = inputMatches
@ -177,14 +175,13 @@ public class DoublesSketchApproxQuantileSqlAggregator implements SqlAggregator
getMaxStreamLengthFromQueryContext(plannerContext.getQueryContext())
);
} else {
VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
input,
ColumnType.FLOAT
);
aggregatorFactory = new DoublesSketchAggregatorFactory(
histogramName,
virtualColumn.getOutputName(),
virtualColumnName,
k,
getMaxStreamLengthFromQueryContext(plannerContext.getQueryContext())
);

View File

@ -35,7 +35,6 @@ import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.datasketches.quantiles.DoublesSketchAggregatorFactory;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
@ -117,14 +116,13 @@ public class DoublesSketchObjectSqlAggregator implements SqlAggregator
DoublesSketchApproxQuantileSqlAggregator.getMaxStreamLengthFromQueryContext(plannerContext.getQueryContext())
);
} else {
VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
input,
ColumnType.FLOAT
);
aggregatorFactory = new DoublesSketchAggregatorFactory(
histogramName,
virtualColumn.getOutputName(),
virtualColumnName,
k,
DoublesSketchApproxQuantileSqlAggregator.getMaxStreamLengthFromQueryContext(plannerContext.getQueryContext())
);

View File

@ -32,7 +32,6 @@ import org.apache.druid.query.aggregation.datasketches.theta.SketchAggregatorFac
import org.apache.druid.query.aggregation.datasketches.theta.SketchMergeAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
@ -123,12 +122,11 @@ public abstract class ThetaSketchBaseSqlAggregator implements SqlAggregator
if (columnArg.isDirectColumnAccess()) {
dimensionSpec = columnArg.getSimpleExtraction().toDimensionSpec(null, inputType);
} else {
VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
columnArg,
dataType
);
dimensionSpec = new DefaultDimensionSpec(virtualColumn.getOutputName(), null, inputType);
dimensionSpec = new DefaultDimensionSpec(virtualColumnName, null, inputType);
}
aggregatorFactory = new SketchMergeAggregatorFactory(

View File

@ -479,14 +479,14 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
new FieldAccessPostAggregator("p1", "a1"),
new HllSketchToEstimatePostAggregator("p3", new FieldAccessPostAggregator("p2", "a0"), false),
new HllSketchToEstimatePostAggregator("p5", new FieldAccessPostAggregator("p4", "a0"), false),
new ExpressionPostAggregator("p6", "(p5 + 1)", null, TestExprMacroTable.INSTANCE),
new ExpressionPostAggregator("p6", "(\"p5\" + 1)", null, TestExprMacroTable.INSTANCE),
new HllSketchToEstimatePostAggregator("p8", new FieldAccessPostAggregator("p7", "a2"), false),
new HllSketchToEstimatePostAggregator(
"p10",
new FieldAccessPostAggregator("p9", "a0"),
false
),
new ExpressionPostAggregator("p11", "abs(p10)", null, TestExprMacroTable.INSTANCE),
new ExpressionPostAggregator("p11", "abs(\"p10\")", null, TestExprMacroTable.INSTANCE),
new HllSketchToEstimateWithBoundsPostAggregator(
"p13",
new FieldAccessPostAggregator("p12", "a0"),
@ -500,7 +500,7 @@ public class HllSketchSqlAggregatorTest extends BaseCalciteQueryTest
new FieldAccessPostAggregator("p16", "a3"),
new HllSketchToStringPostAggregator("p18", new FieldAccessPostAggregator("p17", "a0")),
new HllSketchToStringPostAggregator("p20", new FieldAccessPostAggregator("p19", "a0")),
new ExpressionPostAggregator("p21", "upper(p20)", null, TestExprMacroTable.INSTANCE),
new ExpressionPostAggregator("p21", "upper(\"p20\")", null, TestExprMacroTable.INSTANCE),
new HllSketchToEstimatePostAggregator("p23", new FieldAccessPostAggregator("p22", "a0"), true)
)
)

View File

@ -564,7 +564,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
),
new ExpressionPostAggregator(
"p3",
"(p2 + 1000)",
"(\"p2\" + 1000)",
null,
TestExprMacroTable.INSTANCE
),
@ -578,7 +578,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
),
new ExpressionPostAggregator(
"p6",
"(p5 + 1000)",
"(\"p5\" + 1000)",
null,
TestExprMacroTable.INSTANCE
),
@ -590,7 +590,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
),
0.5f
),
new ExpressionPostAggregator("p9", "abs(p8)", null, TestExprMacroTable.INSTANCE),
new ExpressionPostAggregator("p9", "abs(\"p8\")", null, TestExprMacroTable.INSTANCE),
new DoublesSketchToQuantilesPostAggregator(
"p11",
new FieldAccessPostAggregator(
@ -633,7 +633,7 @@ public class DoublesSketchSqlAggregatorTest extends BaseCalciteQueryTest
),
new ExpressionPostAggregator(
"p20",
"replace(replace(p19,'HeapCompactDoublesSketch','HeapUpdateDoublesSketch'),"
"replace(replace(\"p19\",'HeapCompactDoublesSketch','HeapUpdateDoublesSketch'),"
+ "'Combined Buffer Capacity : 6',"
+ "'Combined Buffer Capacity : 8')",
null,

View File

@ -37,10 +37,8 @@ import org.apache.druid.query.aggregation.bloom.BloomFilterAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.query.dimension.ExtractionDimensionSpec;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
import org.apache.druid.sql.calcite.aggregation.SqlAggregator;
import org.apache.druid.sql.calcite.expression.DruidExpression;
@ -114,10 +112,10 @@ public class BloomFilterSqlAggregator implements SqlAggregator
// Check input for equivalence.
final boolean inputMatches;
final VirtualColumn virtualInput = virtualColumnRegistry.findVirtualColumns(theFactory.requiredFields())
.stream()
.findFirst()
.orElse(null);
final DruidExpression virtualInput = virtualColumnRegistry.findVirtualColumnExpressions(theFactory.requiredFields())
.stream()
.findFirst()
.orElse(null);
if (virtualInput == null) {
if (input.isDirectColumnAccess()) {
inputMatches =
@ -128,7 +126,7 @@ public class BloomFilterSqlAggregator implements SqlAggregator
input.getSimpleExtraction().getExtractionFn().equals(theFactory.getField().getExtractionFn());
}
} else {
inputMatches = ((ExpressionVirtualColumn) virtualInput).getExpression().equals(input.getExpression());
inputMatches = virtualInput.equals(input);
}
final boolean matches = inputMatches && theFactory.getMaxNumEntries() == maxNumEntries;
@ -161,14 +159,13 @@ public class BloomFilterSqlAggregator implements SqlAggregator
input.getSimpleExtraction().getExtractionFn()
);
} else {
VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
input,
inputOperand.getType()
);
spec = new DefaultDimensionSpec(
virtualColumn.getOutputName(),
StringUtils.format("%s:%s", name, virtualColumn.getOutputName())
virtualColumnName,
StringUtils.format("%s:%s", name, virtualColumnName)
);
}

View File

@ -33,7 +33,6 @@ import org.apache.druid.query.filter.BloomDimFilter;
import org.apache.druid.query.filter.BloomKFilter;
import org.apache.druid.query.filter.BloomKFilterHolder;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.DruidExpression;
@ -104,16 +103,15 @@ public class BloomFilterOperatorConversion extends DirectOperatorConversion
null
);
} else if (virtualColumnRegistry != null) {
VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
druidExpression,
operands.get(0).getType()
);
if (virtualColumn == null) {
if (virtualColumnName == null) {
return null;
}
return new BloomDimFilter(
virtualColumn.getOutputName(),
virtualColumnName,
holder,
null,
null

View File

@ -37,10 +37,8 @@ import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.histogram.FixedBucketsHistogram;
import org.apache.druid.query.aggregation.histogram.FixedBucketsHistogramAggregatorFactory;
import org.apache.druid.query.aggregation.histogram.QuantilePostAggregator;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
import org.apache.druid.sql.calcite.aggregation.Aggregations;
import org.apache.druid.sql.calcite.aggregation.SqlAggregator;
@ -187,8 +185,8 @@ public class FixedBucketsHistogramQuantileSqlAggregator implements SqlAggregator
// Check input for equivalence.
final boolean inputMatches;
final VirtualColumn virtualInput =
virtualColumnRegistry.findVirtualColumns(theFactory.requiredFields())
final DruidExpression virtualInput =
virtualColumnRegistry.findVirtualColumnExpressions(theFactory.requiredFields())
.stream()
.findFirst()
.orElse(null);
@ -197,8 +195,7 @@ public class FixedBucketsHistogramQuantileSqlAggregator implements SqlAggregator
inputMatches = input.isDirectColumnAccess()
&& input.getDirectColumn().equals(theFactory.getFieldName());
} else {
inputMatches = ((ExpressionVirtualColumn) virtualInput).getExpression()
.equals(input.getExpression());
inputMatches = virtualInput.equals(input);
}
final boolean matches = inputMatches
@ -230,14 +227,13 @@ public class FixedBucketsHistogramQuantileSqlAggregator implements SqlAggregator
false
);
} else {
VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
input,
ColumnType.FLOAT
);
aggregatorFactory = new FixedBucketsHistogramAggregatorFactory(
histogramName,
virtualColumn.getOutputName(),
virtualColumnName,
numBuckets,
lowerLimit,
upperLimit,

View File

@ -38,11 +38,9 @@ import org.apache.druid.query.aggregation.histogram.ApproximateHistogram;
import org.apache.druid.query.aggregation.histogram.ApproximateHistogramAggregatorFactory;
import org.apache.druid.query.aggregation.histogram.ApproximateHistogramFoldingAggregatorFactory;
import org.apache.druid.query.aggregation.histogram.QuantilePostAggregator;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
import org.apache.druid.sql.calcite.aggregation.Aggregations;
import org.apache.druid.sql.calcite.aggregation.SqlAggregator;
@ -137,8 +135,8 @@ public class QuantileSqlAggregator implements SqlAggregator
// Check input for equivalence.
final boolean inputMatches;
final VirtualColumn virtualInput =
virtualColumnRegistry.findVirtualColumns(theFactory.requiredFields())
final DruidExpression virtualInput =
virtualColumnRegistry.findVirtualColumnExpressions(theFactory.requiredFields())
.stream()
.findFirst()
.orElse(null);
@ -147,8 +145,7 @@ public class QuantileSqlAggregator implements SqlAggregator
inputMatches = input.isDirectColumnAccess()
&& input.getDirectColumn().equals(theFactory.getFieldName());
} else {
inputMatches = ((ExpressionVirtualColumn) virtualInput).getExpression()
.equals(input.getExpression());
inputMatches = virtualInput.equals(input);
}
final boolean matches = inputMatches
@ -192,11 +189,11 @@ public class QuantileSqlAggregator implements SqlAggregator
);
}
} else {
final VirtualColumn virtualColumn =
virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, input, ColumnType.FLOAT);
final String virtualColumnName =
virtualColumnRegistry.getOrCreateVirtualColumnForExpression(input, ColumnType.FLOAT);
aggregatorFactory = new ApproximateHistogramAggregatorFactory(
histogramName,
virtualColumn.getOutputName(),
virtualColumnName,
resolution,
numBuckets,
lowerLimit,

View File

@ -35,7 +35,6 @@ import org.apache.druid.query.aggregation.variance.StandardDeviationPostAggregat
import org.apache.druid.query.aggregation.variance.VarianceAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
@ -93,9 +92,9 @@ public abstract class BaseVarianceSqlAggregator implements SqlAggregator
if (input.isSimpleExtraction()) {
dimensionSpec = input.getSimpleExtraction().toDimensionSpec(null, inputType);
} else {
VirtualColumn virtualColumn =
virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, input, dataType);
dimensionSpec = new DefaultDimensionSpec(virtualColumn.getOutputName(), null, inputType);
String virtualColumnName =
virtualColumnRegistry.getOrCreateVirtualColumnForExpression(input, dataType);
dimensionSpec = new DefaultDimensionSpec(virtualColumnName, null, inputType);
}
if (inputType == null) {

View File

@ -57,6 +57,6 @@ public class SleepOperatorConversion implements SqlOperatorConversion
@Override
public DruidExpression toDruidExpression(PlannerContext plannerContext, RowSignature rowSignature, RexNode rexNode)
{
return OperatorConversions.convertCall(plannerContext, rowSignature, rexNode, "sleep");
return OperatorConversions.convertDirectCall(plannerContext, rowSignature, rexNode, "sleep");
}
}

View File

@ -321,6 +321,13 @@ public class GeneratorBasicSchemas
GeneratorColumnSchema.makeLazyDiscreteUniform("string4", ValueType.STRING, false, 1, null, 1, 10_000),
GeneratorColumnSchema.makeLazyDiscreteUniform("string5", ValueType.STRING, false, 1, 0.3, 1, 1_000_000),
// multi string dims
GeneratorColumnSchema.makeSequential("multi-string1", ValueType.STRING, false, 8, null, 0, 10000),
GeneratorColumnSchema.makeLazyZipf("multi-string2", ValueType.STRING, false, 8, null, 1, 100, 1.5),
GeneratorColumnSchema.makeLazyZipf("multi-string3", ValueType.STRING, false, 16, 0.1, 1, 1_000_000, 2.0),
GeneratorColumnSchema.makeLazyDiscreteUniform("multi-string4", ValueType.STRING, false, 4, null, 1, 10_000),
GeneratorColumnSchema.makeLazyDiscreteUniform("multi-string5", ValueType.STRING, false, 8, 0.3, 1, 1_000_000),
// numeric dims
GeneratorColumnSchema.makeSequential("long1", ValueType.LONG, false, 1, null, 0, 10000),
GeneratorColumnSchema.makeLazyZipf("long2", ValueType.LONG, false, 1, null, 1, 101, 1.5),

View File

@ -45,6 +45,10 @@ public class ExpressionPlan
* expression has no inputs and can be optimized into a constant selector
*/
CONSTANT,
/**
* expression is a simple identifier expression, do not transform
*/
IDENTIFIER,
/**
* expression has a single, single valued input, and is dictionary encoded if the value is a string, and does
* not produce non-scalar output

View File

@ -69,6 +69,8 @@ public class ExpressionPlanner
// check and set traits which allow optimized selectors to be created
if (columns.isEmpty()) {
traits.add(ExpressionPlan.Trait.CONSTANT);
} else if (expression.isIdentifier()) {
traits.add(ExpressionPlan.Trait.IDENTIFIER);
} else if (columns.size() == 1) {
final String column = Iterables.getOnlyElement(columns);
final ColumnCapabilities capabilities = inspector.getColumnCapabilities(column);
@ -105,7 +107,14 @@ public class ExpressionPlanner
// if we didn't eliminate this expression as a single input scalar or mappable expression, it might need
// automatic transformation to map across multi-valued inputs (or row by row detection in the worst case)
if (ExpressionPlan.none(traits, ExpressionPlan.Trait.SINGLE_INPUT_SCALAR)) {
if (
ExpressionPlan.none(
traits,
ExpressionPlan.Trait.SINGLE_INPUT_SCALAR,
ExpressionPlan.Trait.CONSTANT,
ExpressionPlan.Trait.IDENTIFIER
)
) {
final Set<String> definitelyMultiValued = new HashSet<>();
final Set<String> definitelyArray = new HashSet<>();
for (String column : analysis.getRequiredBindings()) {

View File

@ -296,7 +296,8 @@ public class ExpressionSelectors
} else if (capabilities.is(ValueType.STRING)) {
supplier = supplierFromDimensionSelector(
columnSelectorFactory.makeDimensionSelector(new DefaultDimensionSpec(columnName, columnName)),
multiVal
multiVal,
homogenizeNullMultiValueStringArrays
);
} else {
// complex type just pass straight through
@ -349,7 +350,8 @@ public class ExpressionSelectors
*
* @see org.apache.druid.segment.BaseNullableColumnValueSelector#isNull() for why this only works in the numeric case
*/
private static <T> Supplier<T> makeNullableNumericSupplier(
@VisibleForTesting
public static <T> Supplier<T> makeNullableNumericSupplier(
ColumnValueSelector selector,
Supplier<T> supplier
)
@ -371,7 +373,7 @@ public class ExpressionSelectors
* arrays if specified.
*/
@VisibleForTesting
static Supplier<Object> supplierFromDimensionSelector(final DimensionSelector selector, boolean coerceArray)
static Supplier<Object> supplierFromDimensionSelector(final DimensionSelector selector, boolean coerceArray, boolean homogenize)
{
Preconditions.checkNotNull(selector, "selector");
return () -> {
@ -381,8 +383,12 @@ public class ExpressionSelectors
return selector.lookupName(row.get(0));
} else {
// column selector factories hate you and use [] and [null] interchangeably for nullish data
if (row.size() == 0) {
return new Object[]{null};
if (row.size() == 0 || (row.size() == 1 && selector.getObject() == null)) {
if (homogenize) {
return new Object[]{null};
} else {
return null;
}
}
final Object[] strings = new Object[row.size()];
// noinspection SSBasedInspection

View File

@ -64,7 +64,7 @@ public class SingleStringInputCachingExpressionColumnValueSelector implements Co
this.selector = Preconditions.checkNotNull(selector, "selector");
this.expression = Preconditions.checkNotNull(expression, "expression");
final Supplier<Object> inputSupplier = ExpressionSelectors.supplierFromDimensionSelector(selector, false);
final Supplier<Object> inputSupplier = ExpressionSelectors.supplierFromDimensionSelector(selector, false, false);
this.bindings = InputBindings.singleProvider(ExpressionType.STRING, name -> inputSupplier.get());
if (selector.getValueCardinality() == DimensionDictionarySelector.CARDINALITY_UNKNOWN) {

View File

@ -816,10 +816,19 @@ public class MultiValuedDimensionTest extends InitializedNullHandlingTest
query
);
List<ResultRow> expectedResults = Arrays.asList(
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", NullHandling.replaceWithDefault() ? -1L : null, "count", 6L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", 1L, "count", 2L)
);
List<ResultRow> expectedResults;
if (NullHandling.replaceWithDefault()) {
expectedResults = Arrays.asList(
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", -1L, "count", 4L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", 0L, "count", 2L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", 1L, "count", 2L)
);
} else {
expectedResults = Arrays.asList(
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", null, "count", 6L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", 1L, "count", 2L)
);
}
TestHelper.assertExpectedObjects(expectedResults, result.toList(), "expr-auto");
}
@ -858,7 +867,7 @@ public class MultiValuedDimensionTest extends InitializedNullHandlingTest
);
List<ResultRow> expectedResults = Arrays.asList(
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foo", "count", 2L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", NullHandling.replaceWithDefault() ? null : "foo", "count", 2L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foot1, foot2, foot3", "count", 2L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foot3, foot4, foot5", "count", 2L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foot5, foot6, foot7", "count", 2L)
@ -977,7 +986,7 @@ public class MultiValuedDimensionTest extends InitializedNullHandlingTest
.setVirtualColumns(
new ExpressionVirtualColumn(
"tt",
"fold((tag, acc) -> concat(concat(acc, case_searched(acc == '', '', ', '), concat('foo', tag)))), tags, '')",
"fold((tag, acc) -> concat(concat(acc, case_searched(acc == '', '', ', '), concat('foo', tag))), tags, '')",
ColumnType.STRING,
TestExprMacroTable.INSTANCE
)
@ -995,7 +1004,7 @@ public class MultiValuedDimensionTest extends InitializedNullHandlingTest
);
List<ResultRow> expectedResults = Arrays.asList(
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foo", "count", 2L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", NullHandling.replaceWithDefault() ? null : "foo", "count", 2L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foot1, foot2, foot3", "count", 2L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foot3, foot4, foot5", "count", 2L),
GroupByQueryRunnerTestHelper.createExpectedRow(query, "1970", "tt", "foot5, foot6, foot7", "count", 2L)

View File

@ -35,6 +35,7 @@ import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.data.RoaringBitmapSerdeFactory;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.serde.ComplexMetrics;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
@ -211,6 +212,57 @@ public class SegmentGenerator implements Closeable
return retVal;
}
public IncrementalIndex generateIncrementalIndex(
final DataSegment dataSegment,
final GeneratorSchemaInfo schemaInfo,
final Granularity granularity,
final int numRows
)
{
// In case we need to generate hyperUniques.
ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde());
final String dataHash = Hashing.sha256()
.newHasher()
.putString(dataSegment.getId().toString(), StandardCharsets.UTF_8)
.putString(schemaInfo.toString(), StandardCharsets.UTF_8)
.putString(granularity.toString(), StandardCharsets.UTF_8)
.putInt(numRows)
.hash()
.toString();
final DataGenerator dataGenerator = new DataGenerator(
schemaInfo.getColumnSchemas(),
dataSegment.getId().hashCode(), /* Use segment identifier hashCode as seed */
schemaInfo.getDataInterval(),
numRows
);
final IncrementalIndexSchema indexSchema = new IncrementalIndexSchema.Builder()
.withDimensionsSpec(schemaInfo.getDimensionsSpec())
.withMetrics(schemaInfo.getAggsArray())
.withRollup(schemaInfo.isWithRollup())
.withQueryGranularity(granularity)
.build();
final List<InputRow> rows = new ArrayList<>();
for (int i = 0; i < numRows; i++) {
final InputRow row = dataGenerator.nextRow();
rows.add(row);
if ((i + 1) % 20000 == 0) {
log.info("%,d/%,d rows generated for[%s].", i + 1, numRows, dataSegment);
}
}
log.info("%,d/%,d rows generated for[%s].", numRows, numRows, dataSegment);
return makeIncrementalIndex(dataSegment.getId(), dataHash, 0, rows, indexSchema);
}
@Override
public void close() throws IOException
{
@ -236,6 +288,23 @@ public class SegmentGenerator implements Closeable
.buildMMappedIndex();
}
/**
 * Builds an incremental index from the supplied rows using {@link IndexBuilder}, writing any
 * temporary files under a directory derived from the segment id and data hash.
 */
private IncrementalIndex makeIncrementalIndex(
    final SegmentId identifier,
    final String dataHash,
    final int indexNumber,
    final List<InputRow> rows,
    final IncrementalIndexSchema indexSchema
)
{
  return IndexBuilder
      .create()
      .schema(indexSchema)
      // distinct sub-directory per index number under the segment's cache dir
      .tmpDir(new File(getSegmentDir(identifier, dataHash), String.valueOf(indexNumber)))
      .segmentWriteOutMediumFactory(OffHeapMemorySegmentWriteOutMediumFactory.instance())
      .rows(rows)
      .buildIncrementalIndex();
}
private File getSegmentDir(final SegmentId identifier, final String dataHash)
{
return new File(cacheDir, StringUtils.format("%s_%s", identifier, dataHash));

View File

@ -31,26 +31,42 @@ import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.math.expr.Expr;
import org.apache.druid.math.expr.ExprEval;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.math.expr.Parser;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector;
import org.apache.druid.segment.BaseSingleValueDimensionSelector;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.ColumnValueSelector;
import org.apache.druid.segment.Cursor;
import org.apache.druid.segment.DimensionSelector;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.StorageAdapter;
import org.apache.druid.segment.TestObjectColumnSelector;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.generator.GeneratorBasicSchemas;
import org.apache.druid.segment.generator.GeneratorSchemaInfo;
import org.apache.druid.segment.generator.SegmentGenerator;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter;
import org.apache.druid.segment.incremental.IndexSizeExceededException;
import org.apache.druid.segment.incremental.OnheapIncrementalIndex;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.apache.druid.utils.CloseableUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.ArrayList;
@ -59,6 +75,319 @@ import java.util.List;
public class ExpressionSelectorsTest extends InitializedNullHandlingTest
{
private static Closer CLOSER;
private static QueryableIndex QUERYABLE_INDEX;
private static QueryableIndexStorageAdapter QUERYABLE_INDEX_STORAGE_ADAPTER;
private static IncrementalIndex INCREMENTAL_INDEX;
private static IncrementalIndexStorageAdapter INCREMENTAL_INDEX_STORAGE_ADAPTER;
private static List<StorageAdapter> ADAPTERS;
/**
 * Generates the same 10,000 rows of "expression-testbench" data into both an incremental
 * (realtime-style) index and a persisted queryable index, so each test can exercise both
 * {@link StorageAdapter} implementations. All generated resources are registered with
 * {@link #CLOSER} and released in {@link #teardown()}.
 */
@BeforeClass
public static void setup()
{
  CLOSER = Closer.create();
  final GeneratorSchemaInfo schemaInfo = GeneratorBasicSchemas.SCHEMA_MAP.get("expression-testbench");

  final DataSegment dataSegment = DataSegment.builder()
                                             .dataSource("foo")
                                             .interval(schemaInfo.getDataInterval())
                                             .version("1")
                                             .shardSpec(new LinearShardSpec(0))
                                             .size(0)
                                             .build();

  final SegmentGenerator segmentGenerator = CLOSER.register(new SegmentGenerator());

  final int numRows = 10_000;
  INCREMENTAL_INDEX = CLOSER.register(
      segmentGenerator.generateIncrementalIndex(dataSegment, schemaInfo, Granularities.HOUR, numRows)
  );
  INCREMENTAL_INDEX_STORAGE_ADAPTER = new IncrementalIndexStorageAdapter(INCREMENTAL_INDEX);
  QUERYABLE_INDEX = CLOSER.register(
      segmentGenerator.generate(dataSegment, schemaInfo, Granularities.HOUR, numRows)
  );
  QUERYABLE_INDEX_STORAGE_ADAPTER = new QueryableIndexStorageAdapter(QUERYABLE_INDEX);
  ADAPTERS = ImmutableList.of(INCREMENTAL_INDEX_STORAGE_ADAPTER, QUERYABLE_INDEX_STORAGE_ADAPTER);
}
/**
 * Releases every resource registered during {@link #setup()}, suppressing (ignoring) any
 * exception thrown while closing.
 */
@AfterClass
public static void teardown()
{
  CloseableUtils.closeAndSuppressExceptions(CLOSER, ignored -> {});
}
/**
 * Verifies that object bindings produced by {@link ExpressionSelectors#createBindings} for a
 * single-value string column match the values produced by the native dimension and column-value
 * selectors, both when the column is read directly and when it is used inside an expression.
 * Runs against both the incremental and queryable index adapters.
 */
@Test
public void test_single_value_string_bindings()
{
  final String columnName = "string3";
  for (StorageAdapter adapter : ADAPTERS) {
    Sequence<Cursor> cursorSequence = adapter.makeCursors(
        null,
        adapter.getInterval(),
        VirtualColumns.EMPTY,
        Granularities.ALL,
        false,
        null
    );

    List<Cursor> flatten = cursorSequence.toList();
    for (Cursor cursor : flatten) {
      ColumnSelectorFactory factory = cursor.getColumnSelectorFactory();
      // plan for reading the column directly as an identifier
      ExpressionPlan plan = ExpressionPlanner.plan(
          adapter,
          Parser.parse("\"string3\"", TestExprMacroTable.INSTANCE)
      );
      // plan for using the column inside a scalar expression
      ExpressionPlan plan2 = ExpressionPlanner.plan(
          adapter,
          Parser.parse(
              "concat(\"string3\", 'foo')",
              TestExprMacroTable.INSTANCE
          )
      );
      Expr.ObjectBinding bindings = ExpressionSelectors.createBindings(factory, plan);
      Expr.ObjectBinding bindings2 = ExpressionSelectors.createBindings(factory, plan2);

      DimensionSelector dimSelector = factory.makeDimensionSelector(DefaultDimensionSpec.of(columnName));
      ColumnValueSelector valueSelector = factory.makeColumnValueSelector(columnName);

      // realtime index needs to handle as multi-value in case any new values are added during processing
      final boolean isMultiVal = factory.getColumnCapabilities(columnName) == null ||
                                 factory.getColumnCapabilities(columnName).hasMultipleValues().isMaybeTrue();

      while (!cursor.isDone()) {
        Object dimSelectorVal = dimSelector.getObject();
        Object valueSelectorVal = valueSelector.getObject();
        Object bindingVal = bindings.get(columnName);
        Object bindingVal2 = bindings2.get(columnName);
        if (dimSelectorVal == null) {
          Assert.assertNull(dimSelectorVal);
          Assert.assertNull(valueSelectorVal);
          Assert.assertNull(bindingVal);
          if (isMultiVal) {
            // when treated as maybe-multi-value, the binding wraps the null in an array
            Assert.assertNull(((Object[]) bindingVal2)[0]);
          } else {
            Assert.assertNull(bindingVal2);
          }
        } else {
          if (isMultiVal) {
            Assert.assertEquals(dimSelectorVal, ((Object[]) bindingVal)[0]);
            Assert.assertEquals(valueSelectorVal, ((Object[]) bindingVal)[0]);
            Assert.assertEquals(dimSelectorVal, ((Object[]) bindingVal2)[0]);
            Assert.assertEquals(valueSelectorVal, ((Object[]) bindingVal2)[0]);
          } else {
            Assert.assertEquals(dimSelectorVal, bindingVal);
            Assert.assertEquals(valueSelectorVal, bindingVal);
            Assert.assertEquals(dimSelectorVal, bindingVal2);
            Assert.assertEquals(valueSelectorVal, bindingVal2);
          }
        }
        cursor.advance();
      }
    }
  }
}
/**
 * Verifies that object bindings for a multi-value string column agree with the native selectors
 * across four planning shapes: direct identifier access, array-producing expression, array-input
 * expression, and scalar usage (the only case that performs null coercion into {@code [null]}).
 * Runs against both the incremental and queryable index adapters.
 */
@Test
public void test_multi_value_string_bindings()
{
  final String columnName = "multi-string3";
  for (StorageAdapter adapter : ADAPTERS) {
    Sequence<Cursor> cursorSequence = adapter.makeCursors(
        null,
        adapter.getInterval(),
        VirtualColumns.EMPTY,
        Granularities.ALL,
        false,
        null
    );

    List<Cursor> flatten = cursorSequence.toList();
    for (Cursor cursor : flatten) {
      ColumnSelectorFactory factory = cursor.getColumnSelectorFactory();
      // identifier, uses dimension selector supplier supplier, no null coercion
      ExpressionPlan plan = ExpressionPlanner.plan(
          adapter,
          Parser.parse("\"multi-string3\"", TestExprMacroTable.INSTANCE)
      );
      // array output, uses object selector supplier, no null coercion
      ExpressionPlan plan2 = ExpressionPlanner.plan(
          adapter,
          Parser.parse(
              "array_append(\"multi-string3\", 'foo')",
              TestExprMacroTable.INSTANCE
          )
      );
      // array input, uses dimension selector supplier, no null coercion
      ExpressionPlan plan3 = ExpressionPlanner.plan(
          adapter,
          Parser.parse(
              "array_length(\"multi-string3\")",
              TestExprMacroTable.INSTANCE
          )
      );
      // used as scalar, has null coercion
      ExpressionPlan plan4 = ExpressionPlanner.plan(
          adapter,
          Parser.parse(
              "concat(\"multi-string3\", 'foo')",
              TestExprMacroTable.INSTANCE
          )
      );
      Expr.ObjectBinding bindings = ExpressionSelectors.createBindings(factory, plan);
      Expr.ObjectBinding bindings2 = ExpressionSelectors.createBindings(factory, plan2);
      Expr.ObjectBinding bindings3 = ExpressionSelectors.createBindings(factory, plan3);
      Expr.ObjectBinding bindings4 = ExpressionSelectors.createBindings(factory, plan4);

      DimensionSelector dimSelector = factory.makeDimensionSelector(DefaultDimensionSpec.of(columnName));
      ColumnValueSelector valueSelector = factory.makeColumnValueSelector(columnName);

      while (!cursor.isDone()) {
        Object dimSelectorVal = dimSelector.getObject();
        Object valueSelectorVal = valueSelector.getObject();
        Object bindingVal = bindings.get(columnName);
        Object bindingVal2 = bindings2.get(columnName);
        Object bindingVal3 = bindings3.get(columnName);
        Object bindingVal4 = bindings4.get(columnName);
        if (dimSelectorVal == null) {
          Assert.assertNull(dimSelectorVal);
          Assert.assertNull(valueSelectorVal);
          Assert.assertNull(bindingVal);
          Assert.assertNull(bindingVal2);
          Assert.assertNull(bindingVal3);
          // binding4 has null coercion
          Assert.assertArrayEquals(new Object[]{null}, (Object[]) bindingVal4);
        } else {
          // selectors return List values; bindings expose the same values as Object[]
          Assert.assertArrayEquals(((List) dimSelectorVal).toArray(), (Object[]) bindingVal);
          Assert.assertArrayEquals(((List) valueSelectorVal).toArray(), (Object[]) bindingVal);
          Assert.assertArrayEquals(((List) dimSelectorVal).toArray(), (Object[]) bindingVal2);
          Assert.assertArrayEquals(((List) valueSelectorVal).toArray(), (Object[]) bindingVal2);
          Assert.assertArrayEquals(((List) dimSelectorVal).toArray(), (Object[]) bindingVal3);
          Assert.assertArrayEquals(((List) valueSelectorVal).toArray(), (Object[]) bindingVal3);
        }
        cursor.advance();
      }
    }
  }
}
/**
 * Verifies that object bindings for a long column match the native column-value selector, for
 * both direct access and use inside an arithmetic expression, including the null case.
 * Runs against both the incremental and queryable index adapters.
 */
@Test
public void test_long_bindings()
{
  final String columnName = "long3";
  for (StorageAdapter adapter : ADAPTERS) {
    Sequence<Cursor> cursorSequence = adapter.makeCursors(
        null,
        adapter.getInterval(),
        VirtualColumns.EMPTY,
        Granularities.ALL,
        false,
        null
    );

    List<Cursor> flatten = cursorSequence.toList();
    for (Cursor cursor : flatten) {
      ColumnSelectorFactory factory = cursor.getColumnSelectorFactory();
      // an assortment of plans
      ExpressionPlan plan = ExpressionPlanner.plan(
          adapter,
          Parser.parse("\"long3\"", TestExprMacroTable.INSTANCE)
      );
      ExpressionPlan plan2 = ExpressionPlanner.plan(
          adapter,
          Parser.parse(
              "\"long3\" + 3",
              TestExprMacroTable.INSTANCE
          )
      );
      Expr.ObjectBinding bindings = ExpressionSelectors.createBindings(factory, plan);
      Expr.ObjectBinding bindings2 = ExpressionSelectors.createBindings(factory, plan2);

      ColumnValueSelector valueSelector = factory.makeColumnValueSelector(columnName);

      while (!cursor.isDone()) {
        Object bindingVal = bindings.get(columnName);
        Object bindingVal2 = bindings2.get(columnName);
        if (valueSelector.isNull()) {
          Assert.assertNull(valueSelector.getObject());
          Assert.assertNull(bindingVal);
          Assert.assertNull(bindingVal2);
        } else {
          // bindings should agree with both the boxed and primitive selector reads
          Assert.assertEquals(valueSelector.getObject(), bindingVal);
          Assert.assertEquals(valueSelector.getLong(), bindingVal);
          Assert.assertEquals(valueSelector.getObject(), bindingVal2);
          Assert.assertEquals(valueSelector.getLong(), bindingVal2);
        }
        cursor.advance();
      }
    }
  }
}
/**
 * Verifies that object bindings for a double column match the native column-value selector, for
 * both direct access and use inside an arithmetic expression, including the null case.
 * Runs against both the incremental and queryable index adapters.
 */
@Test
public void test_double_bindings()
{
  final String columnName = "double3";
  for (StorageAdapter adapter : ADAPTERS) {
    Sequence<Cursor> cursorSequence = adapter.makeCursors(
        null,
        adapter.getInterval(),
        VirtualColumns.EMPTY,
        Granularities.ALL,
        false,
        null
    );

    List<Cursor> flatten = cursorSequence.toList();
    for (Cursor cursor : flatten) {
      ColumnSelectorFactory factory = cursor.getColumnSelectorFactory();
      // an assortment of plans
      ExpressionPlan plan = ExpressionPlanner.plan(
          adapter,
          Parser.parse("\"double3\"", TestExprMacroTable.INSTANCE)
      );
      ExpressionPlan plan2 = ExpressionPlanner.plan(
          adapter,
          Parser.parse(
              "\"double3\" + 3.0",
              TestExprMacroTable.INSTANCE
          )
      );
      Expr.ObjectBinding bindings = ExpressionSelectors.createBindings(factory, plan);
      Expr.ObjectBinding bindings2 = ExpressionSelectors.createBindings(factory, plan2);

      ColumnValueSelector valueSelector = factory.makeColumnValueSelector(columnName);

      while (!cursor.isDone()) {
        Object bindingVal = bindings.get(columnName);
        Object bindingVal2 = bindings2.get(columnName);
        if (valueSelector.isNull()) {
          Assert.assertNull(valueSelector.getObject());
          Assert.assertNull(bindingVal);
          Assert.assertNull(bindingVal2);
        } else {
          // bindings should agree with both the boxed and primitive selector reads
          Assert.assertEquals(valueSelector.getObject(), bindingVal);
          Assert.assertEquals(valueSelector.getDouble(), bindingVal);
          Assert.assertEquals(valueSelector.getObject(), bindingVal2);
          Assert.assertEquals(valueSelector.getDouble(), bindingVal2);
        }
        cursor.advance();
      }
    }
  }
}
@Test
public void test_canMapOverDictionary_oneSingleValueInput()
{
@ -153,6 +482,7 @@ public class ExpressionSelectorsTest extends InitializedNullHandlingTest
final SettableSupplier<String> settableSupplier = new SettableSupplier<>();
final Supplier<Object> supplier = ExpressionSelectors.supplierFromDimensionSelector(
dimensionSelectorFromSupplier(settableSupplier),
false,
false
);

View File

@ -40,7 +40,6 @@ import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.math.expr.ExpressionType;
import org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
@ -118,8 +117,7 @@ public class ArraySqlAggregator implements SqlAggregator
if (arg.isDirectColumnAccess()) {
fieldName = arg.getDirectColumn();
} else {
VirtualColumn vc = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, arg, elementType);
fieldName = vc.getOutputName();
fieldName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(arg, elementType);
}
if (aggregateCall.isDistinct()) {

View File

@ -32,7 +32,6 @@ import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.post.ArithmeticPostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
@ -116,8 +115,8 @@ public class AvgSqlAggregator implements SqlAggregator
project,
Iterables.getOnlyElement(aggregateCall.getArgList())
);
VirtualColumn vc = virtualColumnRegistry.getVirtualColumnByExpression(arg.getExpression(), resolutionArg.getType());
fieldName = vc != null ? vc.getOutputName() : null;
String vc = virtualColumnRegistry.getVirtualColumnByExpression(arg, resolutionArg.getType());
fieldName = vc != null ? vc : null;
expression = vc != null ? null : arg.getExpression();
}
final String sumName = Calcites.makePrefixedName(name, "sum");

View File

@ -38,7 +38,6 @@ import org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.filter.NotDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
@ -149,8 +148,7 @@ public class BitwiseSqlAggregator implements SqlAggregator
if (arg.isDirectColumnAccess()) {
fieldName = arg.getDirectColumn();
} else {
VirtualColumn vc = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, arg, ColumnType.LONG);
fieldName = vc.getOutputName();
fieldName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(arg, ColumnType.LONG);
}
return Aggregation.create(

View File

@ -41,7 +41,6 @@ import org.apache.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostA
import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.query.dimension.DimensionSpec;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
@ -118,9 +117,8 @@ public class BuiltinApproxCountDistinctSqlAggregator implements SqlAggregator
if (arg.isSimpleExtraction()) {
dimensionSpec = arg.getSimpleExtraction().toDimensionSpec(null, inputType);
} else {
VirtualColumn virtualColumn =
virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, arg, dataType);
dimensionSpec = new DefaultDimensionSpec(virtualColumn.getOutputName(), null, inputType);
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(arg, dataType);
dimensionSpec = new DefaultDimensionSpec(virtualColumnName, null, inputType);
}
aggregatorFactory = new CardinalityAggregatorFactory(

View File

@ -51,7 +51,6 @@ import org.apache.druid.query.aggregation.last.FloatLastAggregatorFactory;
import org.apache.druid.query.aggregation.last.LongLastAggregatorFactory;
import org.apache.druid.query.aggregation.last.StringLastAggregatorFactory;
import org.apache.druid.query.aggregation.post.FinalizingFieldAccessPostAggregator;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
@ -64,7 +63,6 @@ import org.apache.druid.sql.calcite.planner.UnsupportedSQLQueryException;
import org.apache.druid.sql.calcite.rel.VirtualColumnRegistry;
import javax.annotation.Nullable;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@ -239,9 +237,7 @@ public class EarliestLatestAnySqlAggregator implements SqlAggregator
columnName = arg.getDirectColumn();
} else {
final RelDataType dataType = rexNode.getType();
final VirtualColumn virtualColumn =
virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, arg, dataType);
columnName = virtualColumn.getOutputName();
columnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(arg, dataType);
}
return columnName;
}

View File

@ -27,7 +27,6 @@ import org.apache.calcite.sql.SqlAggFunction;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.GroupingAggregatorFactory;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
import org.apache.druid.sql.calcite.aggregation.SqlAggregator;
@ -116,11 +115,10 @@ public class GroupingSqlAggregator implements SqlAggregator
return expression.getDirectColumn();
}
VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
expression,
node.getType()
);
return virtualColumn.getOutputName();
return virtualColumn;
}
}

View File

@ -43,7 +43,6 @@ import org.apache.druid.query.aggregation.ExpressionLambdaAggregatorFactory;
import org.apache.druid.query.aggregation.FilteredAggregatorFactory;
import org.apache.druid.query.filter.NotDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.aggregation.Aggregation;
@ -135,8 +134,7 @@ public class StringSqlAggregator implements SqlAggregator
if (arg.isDirectColumnAccess()) {
fieldName = arg.getDirectColumn();
} else {
VirtualColumn vc = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(plannerContext, arg, elementType);
fieldName = vc.getOutputName();
fieldName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(arg, elementType);
}
final String finalizer = StringUtils.format("if(array_length(o) == 0, null, array_to_string(o, '%s'))", separator);

View File

@ -25,6 +25,7 @@ import org.apache.calcite.sql.SqlOperator;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
@ -58,22 +59,7 @@ public class BinaryOperatorConversion implements SqlOperatorConversion
plannerContext,
rowSignature,
rexNode,
operands -> {
if (operands.size() < 2) {
throw new ISE("Got binary operator[%s] with %s args", operator.getName(), operands.size());
}
return DruidExpression.fromExpression(
StringUtils.format(
"(%s)",
joiner.join(
operands.stream()
.map(DruidExpression::getExpression)
.collect(Collectors.toList())
)
)
);
}
getOperatorFunction(rexNode)
);
}
@ -90,23 +76,30 @@ public class BinaryOperatorConversion implements SqlOperatorConversion
plannerContext,
rowSignature,
rexNode,
operands -> {
if (operands.size() < 2) {
throw new ISE("Got binary operator[%s] with %s args", operator.getName(), operands.size());
}
return DruidExpression.fromExpression(
StringUtils.format(
"(%s)",
joiner.join(
operands.stream()
.map(DruidExpression::getExpression)
.collect(Collectors.toList())
)
)
);
},
getOperatorFunction(rexNode),
postAggregatorVisitor
);
}
private DruidExpression.DruidExpressionBuilder getOperatorFunction(RexNode rexNode)
{
return operands -> {
if (operands.size() < 2) {
throw new ISE("Got binary operator[%s] with %s args", operator.getName(), operands.size());
}
return DruidExpression.ofExpression(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
(args) -> StringUtils.format(
"(%s)",
joiner.join(
args.stream()
.map(DruidExpression::getExpression)
.collect(Collectors.toList())
)
),
operands
);
};
}
}

View File

@ -22,6 +22,7 @@ package org.apache.druid.sql.calcite.expression;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlOperator;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
@ -53,11 +54,11 @@ public class DirectOperatorConversion implements SqlOperatorConversion
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
return OperatorConversions.convertDirectCall(
plannerContext,
rowSignature,
rexNode,
operands -> DruidExpression.fromExpression(DruidExpression.functionCall(druidFunctionName, operands))
druidFunctionName
);
}
@ -79,7 +80,11 @@ public class DirectOperatorConversion implements SqlOperatorConversion
plannerContext,
rowSignature,
rexNode,
operands -> DruidExpression.fromExpression(DruidExpression.functionCall(druidFunctionName, operands)),
operands -> DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
druidFunctionName,
operands
),
postAggregatorVisitor
);
}

View File

@ -20,10 +20,10 @@
package org.apache.druid.sql.calcite.expression;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.io.BaseEncoding;
import com.google.common.primitives.Chars;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.guava.nary.TrinaryFn;
import org.apache.druid.math.expr.Expr;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.math.expr.Parser;
@ -32,7 +32,9 @@ import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.function.Function;
@ -41,58 +43,64 @@ import java.util.function.Function;
* Represents two kinds of expression-like concepts that native Druid queries support:
*
* (1) SimpleExtractions, which are direct column access, possibly with an extractionFn
* (2) native Druid expressions.
* (2) native Druid expressions and virtual columns
*
* When added to {@link org.apache.druid.sql.calcite.rel.VirtualColumnRegistry} whenever used by projections, filters,
* aggregators, or other query components, these will be converted into native virtual columns using
* {@link #toVirtualColumn(String, ColumnType, ExprMacroTable)}
*
* Approximate expression structure is retained in the {@link #arguments}, which when fed into the
* {@link ExpressionBuilder} that all {@link DruidExpression} must be created with will produce the final String
* expression (which will be later parsed into {@link Expr} during native processing).
*
* This allows using the {@link DruidExpressionShuttle} to examine this expression "tree" and potentially rewrite some
* or all of the tree as it visits nodes, and the {@link #nodeType} property provides high level classification of
* the types of expression which a node produces.
*/
public class DruidExpression
{
public enum NodeType
{
/**
* constant value
*/
LITERAL,
/**
* Identifier for a direct physical or virtual column access (column name or virtual column name)
*/
IDENTIFIER,
/**
* Standard native druid expression, which can compute a string that can be parsed into {@link Expr}, or used
* as an {@link ExpressionVirtualColumn}
*/
EXPRESSION,
/**
* Expression backed by a specialized {@link VirtualColumn}, which might provide more optimized evaluation than
* is possible with the standard
*/
SPECIALIZED
}
// Must be sorted
private static final char[] SAFE_CHARS = " ,._-;:(){}[]<>!@#$%^&*`~?/".toCharArray();
private static final VirtualColumnBuilder DEFAULT_VIRTUAL_COLUMN_BUILDER = new ExpressionVirtualColumnBuilder();
static {
Arrays.sort(SAFE_CHARS);
}
@Nullable
private final SimpleExtraction simpleExtraction;
private final String expression;
private final TrinaryFn<String, ColumnType, ExprMacroTable, VirtualColumn> virtualColumnFn;
private DruidExpression(@Nullable final SimpleExtraction simpleExtraction, final String expression, @Nullable final TrinaryFn<String, ColumnType, ExprMacroTable, VirtualColumn> virtualColumnFn)
private static String escape(final String s)
{
this.simpleExtraction = simpleExtraction;
this.expression = Preconditions.checkNotNull(expression);
this.virtualColumnFn = virtualColumnFn != null
? virtualColumnFn
: (name, outputType, macroTable) ->
new ExpressionVirtualColumn(name, expression, outputType, macroTable);
}
public static DruidExpression of(final SimpleExtraction simpleExtraction, final String expression)
{
return new DruidExpression(simpleExtraction, expression, null);
}
public static DruidExpression fromColumn(final String column)
{
return new DruidExpression(SimpleExtraction.of(column, null), StringUtils.format("\"%s\"", escape(column)), null);
}
public static DruidExpression fromExpression(final String expression)
{
return new DruidExpression(null, expression, null);
}
public static DruidExpression fromFunctionCall(final String functionName, final List<DruidExpression> args)
{
return new DruidExpression(null, functionCall(functionName, args), null);
}
public static DruidExpression forVirtualColumn(
final String expression,
final TrinaryFn<String, ColumnType, ExprMacroTable, VirtualColumn> virtualColumnFunction
)
{
return new DruidExpression(null, expression, virtualColumnFunction);
final StringBuilder escaped = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
final char c = s.charAt(i);
if (Character.isLetterOrDigit(c) || Arrays.binarySearch(SAFE_CHARS, c) >= 0) {
escaped.append(c);
} else {
escaped.append("\\u").append(BaseEncoding.base16().encode(Chars.toByteArray(c)));
}
}
return escaped.toString();
}
public static String numberLiteral(final Number n)
@ -110,49 +118,227 @@ public class DruidExpression
return "null";
}
public static String functionCall(final String functionName, final List<DruidExpression> args)
public static ExpressionBuilder functionCall(final String functionName)
{
Preconditions.checkNotNull(functionName, "functionName");
Preconditions.checkNotNull(args, "args");
final StringBuilder builder = new StringBuilder(functionName);
builder.append("(");
return args -> {
Preconditions.checkNotNull(args, "args");
for (int i = 0; i < args.size(); i++) {
final DruidExpression arg = Preconditions.checkNotNull(args.get(i), "arg #%s", i);
builder.append(arg.getExpression());
if (i < args.size() - 1) {
builder.append(",");
final StringBuilder builder = new StringBuilder(functionName);
builder.append("(");
for (int i = 0; i < args.size(); i++) {
final DruidExpression arg = Preconditions.checkNotNull(args.get(i), "arg #%s", i);
builder.append(arg.getExpression());
if (i < args.size() - 1) {
builder.append(",");
}
}
}
builder.append(")");
builder.append(")");
return builder.toString();
return builder.toString();
};
}
/**
 * Builds the native expression string for a call to {@code functionName} over {@code args}.
 *
 * @deprecated use {@link #functionCall(String)} instead
 */
@Deprecated
public static String functionCall(final String functionName, final List<DruidExpression> args)
{
  return functionCall(functionName).buildExpression(args);
}
/**
* @deprecated use {@link #functionCall(String)} instead
*/
@Deprecated
public static String functionCall(final String functionName, final DruidExpression... args)
{
return functionCall(functionName, Arrays.asList(args));
return functionCall(functionName).buildExpression(Arrays.asList(args));
}
private static String escape(final String s)
public static DruidExpression ofLiteral(
@Nullable final ColumnType columnType,
final String literal
)
{
final StringBuilder escaped = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
final char c = s.charAt(i);
if (Character.isLetterOrDigit(c) || Arrays.binarySearch(SAFE_CHARS, c) >= 0) {
escaped.append(c);
} else {
escaped.append("\\u").append(BaseEncoding.base16().encode(Chars.toByteArray(c)));
}
}
return escaped.toString();
return new DruidExpression(
NodeType.LITERAL,
columnType,
null,
new LiteralExpressionBuilder(literal),
Collections.emptyList(),
null
);
}
/**
 * Creates a {@link NodeType#LITERAL} node holding the escaped string literal form of {@code s},
 * typed as {@link ColumnType#STRING}.
 */
public static DruidExpression ofStringLiteral(final String s)
{
  final String escapedLiteral = stringLiteral(s);
  return ofLiteral(ColumnType.STRING, escapedLiteral);
}
/**
 * Creates a {@link NodeType#IDENTIFIER} node for direct access to a physical or virtual column,
 * retaining the supplied {@link SimpleExtraction} (so simple-extraction based planning remains
 * possible) and the column type, which may be null if unknown.
 */
public static DruidExpression ofColumn(
    @Nullable final ColumnType columnType,
    final String column,
    final SimpleExtraction simpleExtraction
)
{
  return new DruidExpression(
      NodeType.IDENTIFIER,
      columnType,
      simpleExtraction,
      new IdentifierExpressionBuilder(column),
      Collections.emptyList(),
      null
  );
}
/**
 * Creates a {@link NodeType#IDENTIFIER} node for direct access to {@code column}, using a
 * default {@link SimpleExtraction} over the column with no extraction function.
 */
public static DruidExpression ofColumn(final ColumnType columnType, final String column)
{
  final SimpleExtraction directExtraction = SimpleExtraction.of(column, null);
  return ofColumn(columnType, column, directExtraction);
}
/**
 * Creates a {@link NodeType#EXPRESSION} node whose string form is a call to the named native
 * function over {@code args}; the arguments are retained as child nodes so shuttles can rewrite
 * the tree.
 */
public static DruidExpression ofFunctionCall(
    final ColumnType columnType,
    final String functionName,
    final List<DruidExpression> args
)
{
  return new DruidExpression(NodeType.EXPRESSION, columnType, null, functionCall(functionName), args, null);
}
/**
 * Creates a {@link NodeType#SPECIALIZED} node backed by a custom {@link VirtualColumnBuilder},
 * for expressions that can be realized as a specialized virtual column rather than a plain
 * expression virtual column.
 */
public static DruidExpression ofVirtualColumn(
    final ColumnType type,
    final ExpressionBuilder expressionBuilder,
    final List<DruidExpression> arguments,
    final VirtualColumnBuilder virtualColumnBuilder
)
{
  return new DruidExpression(NodeType.SPECIALIZED, type, null, expressionBuilder, arguments, virtualColumnBuilder);
}
/**
 * Creates a standard {@link NodeType#EXPRESSION} node with the given output type (null if
 * unknown), expression builder, and child arguments.
 */
public static DruidExpression ofExpression(
    @Nullable final ColumnType columnType,
    final ExpressionBuilder expressionBuilder,
    final List<DruidExpression> arguments
)
{
  return new DruidExpression(NodeType.EXPRESSION, columnType, null, expressionBuilder, arguments, null);
}
/**
 * Creates a standard {@link NodeType#EXPRESSION} node that also retains a
 * {@link SimpleExtraction}, so callers can still plan simple-extraction based optimizations
 * where applicable.
 */
public static DruidExpression ofExpression(
    @Nullable final ColumnType columnType,
    final SimpleExtraction simpleExtraction,
    final ExpressionBuilder expressionBuilder,
    final List<DruidExpression> arguments
)
{
  return new DruidExpression(NodeType.EXPRESSION, columnType, simpleExtraction, expressionBuilder, arguments, null);
}
/**
 * @deprecated use {@link #ofExpression(ColumnType, SimpleExtraction, ExpressionBuilder, List)} instead to participate
 * in virtual column and expression optimization
 */
@Deprecated
public static DruidExpression of(final SimpleExtraction simpleExtraction, final String expression)
{
  // legacy path: the pre-built expression string is wrapped as an opaque literal builder with no
  // child arguments and an unknown (null) druid type, so it cannot be rewritten or optimized
  return new DruidExpression(
      NodeType.EXPRESSION,
      null,
      simpleExtraction,
      new LiteralExpressionBuilder(expression),
      Collections.emptyList(),
      null
  );
}
/**
 * @deprecated use {@link #ofColumn(ColumnType, String)} or {@link #ofColumn(ColumnType, String, SimpleExtraction)}
 * instead
 */
@Deprecated
public static DruidExpression fromColumn(final String column)
{
  // Unlike ofColumn, this is tagged as a generic EXPRESSION node (not IDENTIFIER) and carries no type
  // information, which limits downstream optimization.
  return new DruidExpression(
      NodeType.EXPRESSION,
      null,
      SimpleExtraction.of(column, null),
      new IdentifierExpressionBuilder(column),
      Collections.emptyList(),
      null
  );
}
/**
 * @deprecated use {@link #ofExpression(ColumnType, ExpressionBuilder, List)} instead to participate in virtual
 * column and expression optimization
 */
@Deprecated
public static DruidExpression fromExpression(final String expression)
{
  // The pre-rendered string is wrapped in a LiteralExpressionBuilder with no arguments and no type, so the
  // planner treats it as an opaque expression.
  return new DruidExpression(
      NodeType.EXPRESSION,
      null,
      null,
      new LiteralExpressionBuilder(expression),
      Collections.emptyList(),
      null
  );
}
/**
 * @deprecated use {@link #ofFunctionCall(ColumnType, String, List)} instead to participate in virtual column and
 * expression optimization
 */
@Deprecated
public static DruidExpression fromFunctionCall(final String functionName, final List<DruidExpression> args)
{
  // The function call is rendered to a string eagerly and stored as an opaque literal; the argument
  // expressions are not retained on the node, so they cannot be visited or optimized later.
  return new DruidExpression(
      NodeType.EXPRESSION,
      null,
      null,
      new LiteralExpressionBuilder(functionCall(functionName, args)),
      Collections.emptyList(),
      null
  );
}
// Whether this node is an identifier (direct column reference), a generic expression, or a specialized
// virtual column.
private final NodeType nodeType;
// Native Druid output type, if known.
@Nullable
private final ColumnType druidType;
// Argument sub-expressions; empty for identifiers and literals.
private final List<DruidExpression> arguments;
// Present when this expression can be planned as a simple column access (possibly with an extraction fn).
@Nullable
private final SimpleExtraction simpleExtraction;
// Renders the native expression string for this node from its arguments.
private final ExpressionBuilder expressionBuilder;
// Builds the virtual column realization of this expression; never null (a default is substituted in the ctor).
private final VirtualColumnBuilder virtualColumnBuilder;
// Lazily computed, memoized string form of the full expression tree.
private final Supplier<String> expression;
/**
 * Instances are created through the static factory methods; this constructor wires the node together and
 * memoizes the rendered expression string.
 */
private DruidExpression(
    final NodeType nodeType,
    @Nullable final ColumnType druidType,
    @Nullable final SimpleExtraction simpleExtraction,
    final ExpressionBuilder expressionBuilder,
    final List<DruidExpression> arguments,
    @Nullable final VirtualColumnBuilder virtualColumnBuilder
)
{
  this.nodeType = nodeType;
  this.druidType = druidType;
  this.simpleExtraction = simpleExtraction;
  this.expressionBuilder = Preconditions.checkNotNull(expressionBuilder);
  this.arguments = arguments;
  // Fall back to the generic expression-virtual-column builder when no specialized builder is supplied.
  this.virtualColumnBuilder = virtualColumnBuilder != null ? virtualColumnBuilder : DEFAULT_VIRTUAL_COLUMN_BUILDER;
  // Rendering may recurse through all arguments, so compute the string lazily and at most once.
  this.expression = Suppliers.memoize(() -> this.expressionBuilder.buildExpression(this.arguments));
}
public String getExpression()
{
return expression;
return expression.get();
}
public boolean isDirectColumnAccess()
@ -170,23 +356,38 @@ public class DruidExpression
return simpleExtraction != null;
}
public Expr parse(final ExprMacroTable macroTable)
{
return Parser.parse(expression, macroTable);
}
/**
 * Returns the simple column access equivalent to this expression. Throws {@link NullPointerException} when none
 * exists; callers are expected to check first (presumably via isSimpleExtraction()/isDirectColumnAccess —
 * confirm against the surrounding class).
 */
public SimpleExtraction getSimpleExtraction()
{
  return Preconditions.checkNotNull(simpleExtraction);
}
/**
 * Returns the argument sub-expressions of this node (empty for identifiers and literals). The returned list is
 * the internal one; callers should not mutate it.
 */
public List<DruidExpression> getArguments()
{
  return arguments;
}
/**
 * Parses the rendered expression string into a native {@link Expr} using the given macro table.
 */
public Expr parse(final ExprMacroTable macroTable)
{
  return Parser.parse(expression.get(), macroTable);
}
public VirtualColumn toVirtualColumn(
final String name,
final ColumnType outputType,
final ExprMacroTable macroTable
)
{
return virtualColumnFn.apply(name, outputType, macroTable);
return virtualColumnBuilder.build(name, outputType, expression.get(), macroTable);
}
/**
 * Returns the kind of node this expression is: identifier, generic expression, or specialized virtual column.
 */
public NodeType getType()
{
  return nodeType;
}
/**
 * Returns the native Druid output type of this expression; may be null when the type is unknown.
 */
public ColumnType getDruidType()
{
  return druidType;
}
public DruidExpression map(
@ -195,9 +396,36 @@ public class DruidExpression
)
{
return new DruidExpression(
nodeType,
druidType,
simpleExtraction == null ? null : extractionMap.apply(simpleExtraction),
expressionMap.apply(expression),
null
(args) -> expressionMap.apply(expressionBuilder.buildExpression(args)),
arguments,
virtualColumnBuilder
);
}
/**
 * Returns a copy of this expression with {@code newArgs} substituted for the current argument list. The
 * expression string of the copy is re-rendered (lazily) from the new arguments.
 */
public DruidExpression withArguments(List<DruidExpression> newArgs)
{
  return new DruidExpression(
      nodeType,
      druidType,
      simpleExtraction,
      expressionBuilder,
      newArgs,
      virtualColumnBuilder
  );
}
/**
 * Returns a copy of this expression whose arguments have been transformed by the given
 * {@link DruidExpressionShuttle}. Note the shuttle is applied to the arguments only, not to this node itself.
 */
public DruidExpression visit(DruidExpressionShuttle shuttle)
{
  return new DruidExpression(
      nodeType,
      druidType,
      simpleExtraction,
      expressionBuilder,
      shuttle.visitAll(arguments),
      virtualColumnBuilder
  );
}
@ -212,21 +440,108 @@ public class DruidExpression
}
final DruidExpression that = (DruidExpression) o;
return Objects.equals(simpleExtraction, that.simpleExtraction) &&
Objects.equals(expression, that.expression);
Objects.equals(nodeType, that.nodeType) &&
Objects.equals(druidType, that.druidType) &&
Objects.equals(arguments, that.arguments) &&
Objects.equals(expression.get(), that.expression.get());
}
@Override
public int hashCode()
{
return Objects.hash(simpleExtraction, expression);
return Objects.hash(nodeType, druidType, simpleExtraction, arguments, expression.get());
}
@Override
public String toString()
{
return "DruidExpression{" +
"simpleExtraction=" + simpleExtraction +
", expression='" + expression + '\'' +
"type=" + (druidType != null ? druidType.asTypeString() : nullLiteral()) +
", simpleExtraction=" + simpleExtraction +
", expression='" + expression.get() + '\'' +
", arguments=" + arguments +
'}';
}
/**
 * Transformation applied across a {@link DruidExpression} argument tree. Implementations rewrite one node at a
 * time; {@link #visitAll} applies the rewrite to each element of a list, preserving order.
 */
@FunctionalInterface
public interface DruidExpressionShuttle
{
  /**
   * Transforms a single expression node, returning the (possibly unchanged) replacement.
   */
  DruidExpression visit(DruidExpression expression);

  /**
   * Applies {@link #visit} to every element of {@code expressions}, returning a new list in the same order.
   */
  default List<DruidExpression> visitAll(List<DruidExpression> expressions)
  {
    List<DruidExpression> list = new ArrayList<>(expressions.size());
    for (DruidExpression expr : expressions) {
      list.add(visit(expr));
    }
    return list;
  }
}
/**
 * Builds a complete {@link DruidExpression} from a list of already-translated argument expressions.
 */
@FunctionalInterface
public interface DruidExpressionBuilder
{
  DruidExpression buildExpression(List<DruidExpression> arguments);
}
/**
 * Renders the native Druid expression string for a node, given its argument sub-expressions.
 */
@FunctionalInterface
public interface ExpressionBuilder
{
  String buildExpression(List<DruidExpression> arguments);
}
/**
 * Builds the {@link VirtualColumn} realization of an expression, given the desired output name and type and the
 * already-rendered expression string.
 */
@FunctionalInterface
public interface VirtualColumnBuilder
{
  VirtualColumn build(String name, ColumnType outputType, String expression, ExprMacroTable macroTable);
}
/**
 * Direct reference to a physical or virtual column. Renders as the escaped column name wrapped in double quotes.
 */
public static class IdentifierExpressionBuilder implements ExpressionBuilder
{
  private final String quotedIdentifier;

  public IdentifierExpressionBuilder(String identifier)
  {
    // Escape and quote once up front; the rendered form never changes after construction.
    this.quotedIdentifier = "\"" + escape(identifier) + "\"";
  }

  @Override
  public String buildExpression(List<DruidExpression> arguments)
  {
    // Identifiers take no arguments; the parameter exists only to satisfy ExpressionBuilder.
    return quotedIdentifier;
  }
}
/**
 * Builds expressions for a static constant value: the pre-rendered string is returned verbatim.
 */
public static class LiteralExpressionBuilder implements ExpressionBuilder
{
  private final String literalExpression;

  public LiteralExpressionBuilder(String literal)
  {
    this.literalExpression = literal;
  }

  @Override
  public String buildExpression(List<DruidExpression> arguments)
  {
    // Literals take no arguments; the parameter exists only to satisfy ExpressionBuilder.
    return literalExpression;
  }
}
/**
 * Generic {@link VirtualColumnBuilder} producing an {@link ExpressionVirtualColumn} from the rendered expression
 * string. Presumably this backs the DEFAULT_VIRTUAL_COLUMN_BUILDER fallback used by the constructor — confirm
 * against the field initializer earlier in the class.
 */
public static class ExpressionVirtualColumnBuilder implements VirtualColumnBuilder
{
  @Override
  public VirtualColumn build(String name, ColumnType outputType, String expression, ExprMacroTable macroTable)
  {
    return new ExpressionVirtualColumn(name, expression, outputType, macroTable);
  }
}
}

View File

@ -50,8 +50,8 @@ import org.apache.druid.query.filter.OrDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.query.ordering.StringComparator;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.filtration.BoundRefKey;
import org.apache.druid.sql.calcite.filtration.Bounds;
@ -65,6 +65,7 @@ import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
/**
* A collection of functions for translating from Calcite expressions into Druid objects.
@ -222,11 +223,12 @@ public class Expressions
// Translate field references.
final RexInputRef ref = (RexInputRef) rexNode;
final String columnName = rowSignature.getColumnName(ref.getIndex());
final Optional<ColumnType> columnType = rowSignature.getColumnType(ref.getIndex());
if (columnName == null) {
throw new ISE("Expression referred to nonexistent index[%d]", ref.getIndex());
}
return DruidExpression.fromColumn(columnName);
return DruidExpression.ofColumn(columnType.orElse(null), columnName);
}
private static DruidExpression rexCallToDruidExpression(
@ -258,7 +260,7 @@ public class Expressions
if (postAggregator != null) {
postAggregatorVisitor.addPostAgg(postAggregator);
String exprName = postAggregator.getName();
return DruidExpression.of(SimpleExtraction.of(exprName, null), exprName);
return DruidExpression.ofColumn(postAggregator.getType(rowSignature), exprName);
}
}
@ -272,6 +274,7 @@ public class Expressions
}
}
@Nullable
private static DruidExpression literalToDruidExpression(
final PlannerContext plannerContext,
final RexNode rexNode
@ -280,32 +283,34 @@ public class Expressions
final SqlTypeName sqlTypeName = rexNode.getType().getSqlTypeName();
// Translate literal.
final ColumnType columnType = Calcites.getColumnTypeForRelDataType(rexNode.getType());
if (RexLiteral.isNullLiteral(rexNode)) {
return DruidExpression.fromExpression(DruidExpression.nullLiteral());
return DruidExpression.ofLiteral(columnType, DruidExpression.nullLiteral());
} else if (SqlTypeName.NUMERIC_TYPES.contains(sqlTypeName)) {
return DruidExpression.fromExpression(DruidExpression.numberLiteral((Number) RexLiteral.value(rexNode)));
return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral((Number) RexLiteral.value(rexNode)));
} else if (SqlTypeFamily.INTERVAL_DAY_TIME == sqlTypeName.getFamily()) {
// Calcite represents DAY-TIME intervals in milliseconds.
final long milliseconds = ((Number) RexLiteral.value(rexNode)).longValue();
return DruidExpression.fromExpression(DruidExpression.numberLiteral(milliseconds));
return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral(milliseconds));
} else if (SqlTypeFamily.INTERVAL_YEAR_MONTH == sqlTypeName.getFamily()) {
// Calcite represents YEAR-MONTH intervals in months.
final long months = ((Number) RexLiteral.value(rexNode)).longValue();
return DruidExpression.fromExpression(DruidExpression.numberLiteral(months));
return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral(months));
} else if (SqlTypeName.STRING_TYPES.contains(sqlTypeName)) {
return DruidExpression.fromExpression(DruidExpression.stringLiteral(RexLiteral.stringValue(rexNode)));
return DruidExpression.ofStringLiteral(RexLiteral.stringValue(rexNode));
} else if (SqlTypeName.TIMESTAMP == sqlTypeName || SqlTypeName.DATE == sqlTypeName) {
if (RexLiteral.isNullLiteral(rexNode)) {
return DruidExpression.fromExpression(DruidExpression.nullLiteral());
return DruidExpression.ofLiteral(columnType, DruidExpression.nullLiteral());
} else {
return DruidExpression.fromExpression(
return DruidExpression.ofLiteral(
columnType,
DruidExpression.numberLiteral(
Calcites.calciteDateTimeLiteralToJoda(rexNode, plannerContext.getTimeZone()).getMillis()
)
);
}
} else if (SqlTypeName.BOOLEAN == sqlTypeName) {
return DruidExpression.fromExpression(DruidExpression.numberLiteral(RexLiteral.booleanValue(rexNode) ? 1 : 0));
return DruidExpression.ofLiteral(columnType, DruidExpression.numberLiteral(RexLiteral.booleanValue(rexNode) ? 1 : 0));
} else {
// Can't translate other literals.
return null;
@ -470,14 +475,13 @@ public class Expressions
druidExpression.getSimpleExtraction().getExtractionFn()
);
} else if (virtualColumnRegistry != null) {
final VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
final String virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
druidExpression,
operand.getType()
);
equalFilter = new SelectorDimFilter(
virtualColumn.getOutputName(),
virtualColumn,
NullHandling.defaultStringValue(),
null
);
@ -559,13 +563,10 @@ public class Expressions
column = lhsExpression.getSimpleExtraction().getColumn();
extractionFn = lhsExpression.getSimpleExtraction().getExtractionFn();
} else if (virtualColumnRegistry != null) {
VirtualColumn virtualLhs = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
column = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
lhsExpression,
lhs.getType()
);
column = virtualLhs.getOutputName();
extractionFn = null;
} else {
return null;

View File

@ -71,8 +71,9 @@ import java.util.stream.IntStream;
*/
public class OperatorConversions
{
@Nullable
public static DruidExpression convertCall(
public static DruidExpression convertDirectCall(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode,
@ -83,12 +84,16 @@ public class OperatorConversions
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.fromFunctionCall(functionName, druidExpressions)
druidExpressions -> DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
functionName,
druidExpressions
)
);
}
@Nullable
public static DruidExpression convertCall(
public static DruidExpression convertDirectCallWithExtraction(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode,
@ -100,9 +105,31 @@ public class OperatorConversions
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
druidExpressions -> DruidExpression.ofExpression(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
simpleExtractionFunction == null ? null : simpleExtractionFunction.apply(druidExpressions),
DruidExpression.functionCall(functionName, druidExpressions)
DruidExpression.functionCall(functionName),
druidExpressions
)
);
}
/**
 * Translates a function-call {@link RexNode} into a {@link DruidExpression} whose string form is produced by the
 * supplied {@link DruidExpression.ExpressionBuilder}, with the output type derived from the Calcite return type
 * of {@code rexNode}.
 *
 * @return the translated expression, or null when the call's operands could not be translated
 */
@Nullable
public static DruidExpression convertCallBuilder(
    final PlannerContext plannerContext,
    final RowSignature rowSignature,
    final RexNode rexNode,
    final DruidExpression.ExpressionBuilder expressionBuilder
)
{
  return convertCall(
      plannerContext,
      rowSignature,
      rexNode,
      (operands) -> DruidExpression.ofExpression(
          Calcites.getColumnTypeForRelDataType(rexNode.getType()),
          expressionBuilder,
          operands
      )
  );
}
@ -112,7 +139,7 @@ public class OperatorConversions
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode,
final Function<List<DruidExpression>, DruidExpression> expressionFunction
final DruidExpression.DruidExpressionBuilder expressionFunction
)
{
final RexCall call = (RexCall) rexNode;
@ -127,7 +154,38 @@ public class OperatorConversions
return null;
}
return expressionFunction.apply(druidExpressions);
return expressionFunction.buildExpression(druidExpressions);
}
/**
 * @deprecated use {@link #convertDirectCall} instead; this overload is retained for compatibility and simply
 * delegates to it
 */
@Deprecated
@Nullable
public static DruidExpression convertCall(
    final PlannerContext plannerContext,
    final RowSignature rowSignature,
    final RexNode rexNode,
    final String functionName
)
{
  return convertDirectCall(plannerContext, rowSignature, rexNode, functionName);
}
/**
 * @deprecated use {@link #convertDirectCallWithExtraction} instead; this overload is retained for compatibility
 * and simply delegates to it
 */
@Deprecated
@Nullable
public static DruidExpression convertCall(
    final PlannerContext plannerContext,
    final RowSignature rowSignature,
    final RexNode rexNode,
    final String functionName,
    final Function<List<DruidExpression>, SimpleExtraction> simpleExtractionFunction
)
{
  return convertDirectCallWithExtraction(
      plannerContext,
      rowSignature,
      rexNode,
      functionName,
      simpleExtractionFunction
  );
}
/**
@ -153,7 +211,7 @@ public class OperatorConversions
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode,
final Function<List<DruidExpression>, DruidExpression> expressionFunction,
final DruidExpression.DruidExpressionBuilder expressionFunction,
final PostAggregatorVisitor postAggregatorVisitor
)
{
@ -170,7 +228,7 @@ public class OperatorConversions
return null;
}
return expressionFunction.apply(druidExpressions);
return expressionFunction.buildExpression(druidExpressions);
}
/**

View File

@ -50,16 +50,14 @@ public class UnaryFunctionOperatorConversion implements SqlOperatorConversion
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
return OperatorConversions.convertCallBuilder(
plannerContext,
rowSignature,
rexNode,
operands -> DruidExpression.fromExpression(
StringUtils.format(
"%s(%s)",
druidOperator,
Iterables.getOnlyElement(operands).getExpression()
)
operands -> StringUtils.format(
"%s(%s)",
druidOperator,
Iterables.getOnlyElement(operands).getExpression()
)
);
}

View File

@ -50,16 +50,14 @@ public class UnaryPrefixOperatorConversion implements SqlOperatorConversion
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
return OperatorConversions.convertCallBuilder(
plannerContext,
rowSignature,
rexNode,
operands -> DruidExpression.fromExpression(
StringUtils.format(
"(%s %s)",
druidOperator,
Iterables.getOnlyElement(operands).getExpression()
)
operands -> StringUtils.format(
"(%s %s)",
druidOperator,
Iterables.getOnlyElement(operands).getExpression()
)
);
}

View File

@ -50,16 +50,14 @@ public class UnarySuffixOperatorConversion implements SqlOperatorConversion
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
return OperatorConversions.convertCallBuilder(
plannerContext,
rowSignature,
rexNode,
operands -> DruidExpression.fromExpression(
StringUtils.format(
"(%s %s)",
Iterables.getOnlyElement(operands).getExpression(),
druidOperator
)
operands -> StringUtils.format(
"(%s %s)",
Iterables.getOnlyElement(operands).getExpression(),
druidOperator
)
);
}

View File

@ -19,20 +19,15 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ArrayAppendOperatorConversion implements SqlOperatorConversion
public class ArrayAppendOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("ARRAY_APPEND")
@ -53,27 +48,8 @@ public class ArrayAppendOperatorConversion implements SqlOperatorConversion
.returnTypeInference(Calcites.ARG0_NULLABLE_ARRAY_RETURN_TYPE_INFERENCE)
.build();
@Override
public SqlOperator calciteOperator()
public ArrayAppendOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array_append", druidExpressions)
)
);
super(SQL_FUNCTION, "array_append");
}
}

View File

@ -19,20 +19,15 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ArrayConcatOperatorConversion implements SqlOperatorConversion
public class ArrayConcatOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("ARRAY_CONCAT")
@ -53,27 +48,8 @@ public class ArrayConcatOperatorConversion implements SqlOperatorConversion
.returnTypeInference(Calcites.ARG0_NULLABLE_ARRAY_RETURN_TYPE_INFERENCE)
.build();
@Override
public SqlOperator calciteOperator()
public ArrayConcatOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array_concat", druidExpressions)
)
);
super(SQL_FUNCTION, "array_concat");
}
}

View File

@ -19,40 +19,13 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
public class ArrayConstructorOperatorConversion implements SqlOperatorConversion
public class ArrayConstructorOperatorConversion extends DirectOperatorConversion
{
private static final SqlOperator SQL_FUNCTION = SqlStdOperatorTable.ARRAY_VALUE_CONSTRUCTOR;
@Override
public SqlOperator calciteOperator()
public ArrayConstructorOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array", druidExpressions)
)
);
super(SqlStdOperatorTable.ARRAY_VALUE_CONSTRUCTOR, "array");
}
}

View File

@ -19,20 +19,15 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ArrayLengthOperatorConversion implements SqlOperatorConversion
public class ArrayLengthOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("ARRAY_LENGTH")
@ -46,27 +41,8 @@ public class ArrayLengthOperatorConversion implements SqlOperatorConversion
.returnTypeCascadeNullable(SqlTypeName.INTEGER)
.build();
@Override
public SqlOperator calciteOperator()
public ArrayLengthOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array_length", druidExpressions)
)
);
super(SQL_FUNCTION, "array_length");
}
}

View File

@ -19,20 +19,15 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ArrayOffsetOfOperatorConversion implements SqlOperatorConversion
public class ArrayOffsetOfOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("ARRAY_OFFSET_OF")
@ -50,27 +45,8 @@ public class ArrayOffsetOfOperatorConversion implements SqlOperatorConversion
.returnTypeNullable(SqlTypeName.INTEGER)
.build();
@Override
public SqlOperator calciteOperator()
public ArrayOffsetOfOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array_offset_of", druidExpressions)
)
);
super(SQL_FUNCTION, "array_offset_of");
}
}

View File

@ -19,19 +19,14 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ArrayOffsetOperatorConversion implements SqlOperatorConversion
public class ArrayOffsetOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("ARRAY_OFFSET")
@ -49,27 +44,8 @@ public class ArrayOffsetOperatorConversion implements SqlOperatorConversion
.returnTypeInference(ArrayOrdinalOperatorConversion.ARG0_ELEMENT_INFERENCE)
.build();
@Override
public SqlOperator calciteOperator()
public ArrayOffsetOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array_offset", druidExpressions)
)
);
super(SQL_FUNCTION, "array_offset");
}
}

View File

@ -19,20 +19,15 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ArrayOrdinalOfOperatorConversion implements SqlOperatorConversion
public class ArrayOrdinalOfOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("ARRAY_ORDINAL_OF")
@ -50,27 +45,8 @@ public class ArrayOrdinalOfOperatorConversion implements SqlOperatorConversion
.returnTypeCascadeNullable(SqlTypeName.INTEGER)
.build();
@Override
public SqlOperator calciteOperator()
public ArrayOrdinalOfOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array_ordinal_of", druidExpressions)
)
);
super(SQL_FUNCTION, "array_ordinal_of");
}
}

View File

@ -20,22 +20,17 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlOperatorBinding;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlReturnTypeInference;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeUtil;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ArrayOrdinalOperatorConversion implements SqlOperatorConversion
public class ArrayOrdinalOperatorConversion extends DirectOperatorConversion
{
static final SqlReturnTypeInference ARG0_ELEMENT_INFERENCE = new ArrayElementReturnTypeInference();
@ -55,28 +50,9 @@ public class ArrayOrdinalOperatorConversion implements SqlOperatorConversion
.returnTypeInference(ARG0_ELEMENT_INFERENCE)
.build();
@Override
public SqlOperator calciteOperator()
public ArrayOrdinalOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array_ordinal", druidExpressions)
)
);
super(SQL_FUNCTION, "array_ordinal");
}
static class ArrayElementReturnTypeInference implements SqlReturnTypeInference

View File

@ -19,20 +19,15 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ArrayPrependOperatorConversion implements SqlOperatorConversion
public class ArrayPrependOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("ARRAY_PREPEND")
@ -53,27 +48,8 @@ public class ArrayPrependOperatorConversion implements SqlOperatorConversion
.returnTypeInference(Calcites.ARG1_NULLABLE_ARRAY_RETURN_TYPE_INFERENCE)
.build();
@Override
public SqlOperator calciteOperator()
public ArrayPrependOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array_prepend", druidExpressions)
)
);
super(SQL_FUNCTION, "array_prepend");
}
}

View File

@ -19,20 +19,15 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ArraySliceOperatorConversion implements SqlOperatorConversion
public class ArraySliceOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("ARRAY_SLICE")
@ -61,27 +56,8 @@ public class ArraySliceOperatorConversion implements SqlOperatorConversion
.returnTypeInference(Calcites.ARG0_NULLABLE_ARRAY_RETURN_TYPE_INFERENCE)
.build();
@Override
public SqlOperator calciteOperator()
public ArraySliceOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array_slice", druidExpressions)
)
);
super(SQL_FUNCTION, "array_slice");
}
}

View File

@ -19,20 +19,15 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ArrayToStringOperatorConversion implements SqlOperatorConversion
public class ArrayToStringOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("ARRAY_TO_STRING")
@ -50,27 +45,8 @@ public class ArrayToStringOperatorConversion implements SqlOperatorConversion
.returnTypeCascadeNullable(SqlTypeName.VARCHAR)
.build();
@Override
public SqlOperator calciteOperator()
public ArrayToStringOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("array_to_string", druidExpressions)
)
);
super(SQL_FUNCTION, "array_to_string");
}
}

View File

@ -30,6 +30,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class BTrimOperatorConversion implements SqlOperatorConversion
@ -64,13 +65,15 @@ public class BTrimOperatorConversion implements SqlOperatorConversion
return TrimOperatorConversion.makeTrimExpression(
SqlTrimFunction.Flag.BOTH,
druidExpressions.get(0),
druidExpressions.get(1)
druidExpressions.get(1),
Calcites.getColumnTypeForRelDataType(rexNode.getType())
);
} else {
return TrimOperatorConversion.makeTrimExpression(
SqlTrimFunction.Flag.BOTH,
druidExpressions.get(0),
DruidExpression.fromExpression(DruidExpression.stringLiteral(" "))
DruidExpression.ofStringLiteral(" "),
Calcites.getColumnTypeForRelDataType(rexNode.getType())
);
}
}

View File

@ -30,10 +30,12 @@ import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.apache.druid.math.expr.ExprType;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.joda.time.Period;
@ -105,9 +107,14 @@ public class CastOperatorConversion implements SqlOperatorConversion
final SqlTypeName toType = rexNode.getType().getSqlTypeName();
if (SqlTypeName.CHAR_TYPES.contains(fromType) && SqlTypeName.DATETIME_TYPES.contains(toType)) {
return castCharToDateTime(plannerContext, operandExpression, toType);
return castCharToDateTime(
plannerContext,
operandExpression,
toType,
Calcites.getColumnTypeForRelDataType(rexNode.getType())
);
} else if (SqlTypeName.DATETIME_TYPES.contains(fromType) && SqlTypeName.CHAR_TYPES.contains(toType)) {
return castDateTimeToChar(plannerContext, operandExpression, fromType);
return castDateTimeToChar(plannerContext, operandExpression, fromType, Calcites.getColumnTypeForRelDataType(rexNode.getType()));
} else {
// Handle other casts.
final ExprType fromExprType = EXPRESSION_TYPES.get(fromType);
@ -146,16 +153,18 @@ public class CastOperatorConversion implements SqlOperatorConversion
private static DruidExpression castCharToDateTime(
final PlannerContext plannerContext,
final DruidExpression operand,
final SqlTypeName toType
final SqlTypeName toType,
final ColumnType toDruidType
)
{
// Cast strings to datetimes by parsing them from SQL format.
final DruidExpression timestampExpression = DruidExpression.fromFunctionCall(
final DruidExpression timestampExpression = DruidExpression.ofFunctionCall(
toDruidType,
"timestamp_parse",
ImmutableList.of(
operand,
DruidExpression.fromExpression(DruidExpression.nullLiteral()),
DruidExpression.fromExpression(DruidExpression.stringLiteral(plannerContext.getTimeZone().getID()))
DruidExpression.ofLiteral(null, DruidExpression.nullLiteral()),
DruidExpression.ofStringLiteral(plannerContext.getTimeZone().getID())
)
);
@ -175,15 +184,17 @@ public class CastOperatorConversion implements SqlOperatorConversion
private static DruidExpression castDateTimeToChar(
final PlannerContext plannerContext,
final DruidExpression operand,
final SqlTypeName fromType
final SqlTypeName fromType,
final ColumnType toDruidType
)
{
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
toDruidType,
"timestamp_format",
ImmutableList.of(
operand,
DruidExpression.fromExpression(DruidExpression.stringLiteral(dateTimeFormatString(fromType))),
DruidExpression.fromExpression(DruidExpression.stringLiteral(plannerContext.getTimeZone().getID()))
DruidExpression.ofStringLiteral(dateTimeFormatString(fromType)),
DruidExpression.ofStringLiteral(plannerContext.getTimeZone().getID())
)
);
}

View File

@ -28,6 +28,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
@ -52,10 +53,11 @@ public class CeilOperatorConversion implements SqlOperatorConversion
if (call.getOperands().size() == 1) {
// CEIL(expr) -- numeric CEIL
return OperatorConversions.convertCall(plannerContext, rowSignature, call, "ceil");
return OperatorConversions.convertDirectCall(plannerContext, rowSignature, call, "ceil");
} else if (call.getOperands().size() == 2) {
// CEIL(expr TO timeUnit) -- time CEIL
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"timestamp_ceil",
TimeFloorOperatorConversion.toTimestampFloorOrCeilArgs(plannerContext, rowSignature, call.getOperands())
);

View File

@ -19,18 +19,14 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ConcatOperatorConversion implements SqlOperatorConversion
public class ConcatOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("CONCAT")
@ -39,27 +35,8 @@ public class ConcatOperatorConversion implements SqlOperatorConversion
.functionCategory(SqlFunctionCategory.STRING)
.build();
@Override
public SqlFunction calciteOperator()
public ConcatOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("concat", druidExpressions)
)
);
super(SQL_FUNCTION, "concat");
}
}

View File

@ -24,7 +24,6 @@ import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.java.util.common.StringUtils;
@ -34,8 +33,8 @@ import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.SearchQueryDimFilter;
import org.apache.druid.query.search.ContainsSearchQuerySpec;
import org.apache.druid.query.search.SearchQuerySpec;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
@ -51,9 +50,8 @@ import java.util.List;
* translate these functions into {@link SearchQueryDimFilter} with {@link ContainsSearchQuerySpec} as
* search query spec.
*/
public class ContainsOperatorConversion implements SqlOperatorConversion
public class ContainsOperatorConversion extends DirectOperatorConversion
{
private final SqlOperator operator;
private final boolean caseSensitive;
private ContainsOperatorConversion(
@ -61,7 +59,7 @@ public class ContainsOperatorConversion implements SqlOperatorConversion
final boolean caseSensitive
)
{
this.operator = sqlFunction;
super(sqlFunction, StringUtils.toLowerCase(sqlFunction.getName()));
this.caseSensitive = caseSensitive;
}
@ -89,31 +87,6 @@ public class ContainsOperatorConversion implements SqlOperatorConversion
.build();
}
@Override
public SqlOperator calciteOperator()
{
return operator;
}
@Nullable
@Override
public DruidExpression toDruidExpression(
PlannerContext plannerContext,
RowSignature rowSignature,
RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
operands -> DruidExpression.fromExpression(DruidExpression.functionCall(
StringUtils.toLowerCase(operator.getName()),
operands
))
);
}
@Nullable
@Override
public DimFilter toDruidFilter(
@ -145,17 +118,14 @@ public class ContainsOperatorConversion implements SqlOperatorConversion
null
);
} else if (virtualColumnRegistry != null) {
VirtualColumn v = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String v = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
druidExpression,
operands.get(0).getType()
);
return new SearchQueryDimFilter(
v.getOutputName(), spec, null, null);
return new SearchQueryDimFilter(v, spec, null, null);
} else {
return null;
}
}
}

View File

@ -35,6 +35,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.joda.time.Period;
@ -106,13 +107,14 @@ public class DateTruncOperatorConversion implements SqlOperatorConversion
throw new IAE("Operator[%s] cannot truncate to[%s]", calciteOperator().getName(), truncType);
}
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"timestamp_floor",
ImmutableList.of(
arg,
DruidExpression.fromExpression(DruidExpression.stringLiteral(truncPeriod.toString())),
DruidExpression.fromExpression(DruidExpression.stringLiteral(null)),
DruidExpression.fromExpression(DruidExpression.stringLiteral(plannerContext.getTimeZone().getID()))
DruidExpression.ofStringLiteral(truncPeriod.toString()),
DruidExpression.ofStringLiteral(null),
DruidExpression.ofStringLiteral(plannerContext.getTimeZone().getID())
)
);
}

View File

@ -28,6 +28,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
@ -52,10 +53,11 @@ public class FloorOperatorConversion implements SqlOperatorConversion
if (call.getOperands().size() == 1) {
// FLOOR(expr) -- numeric FLOOR
return OperatorConversions.convertCall(plannerContext, rowSignature, call, "floor");
return OperatorConversions.convertDirectCall(plannerContext, rowSignature, call, "floor");
} else if (call.getOperands().size() == 2) {
// FLOOR(expr TO timeUnit) -- time FLOOR
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"timestamp_floor",
TimeFloorOperatorConversion.toTimestampFloorOrCeilArgs(plannerContext, rowSignature, call.getOperands())
);

View File

@ -70,7 +70,7 @@ public class HumanReadableFormatOperatorConversion implements SqlOperatorConvers
final RexNode rexNode
)
{
return OperatorConversions.convertCall(plannerContext, rowSignature, rexNode, name);
return OperatorConversions.convertDirectCall(plannerContext, rowSignature, rexNode, name);
}
private static class HumanReadableFormatOperandTypeChecker implements SqlOperandTypeChecker

View File

@ -30,6 +30,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class LPadOperatorConversion implements SqlOperatorConversion
@ -61,7 +62,8 @@ public class LPadOperatorConversion implements SqlOperatorConversion
rexNode,
druidExpressions -> {
if (druidExpressions.size() > 2) {
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"lpad",
ImmutableList.of(
druidExpressions.get(0),
@ -70,12 +72,13 @@ public class LPadOperatorConversion implements SqlOperatorConversion
)
);
} else {
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"lpad",
ImmutableList.of(
druidExpressions.get(0),
druidExpressions.get(1),
DruidExpression.fromExpression(DruidExpression.stringLiteral(" "))
DruidExpression.ofStringLiteral(" ")
)
);
}

View File

@ -30,6 +30,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class LTrimOperatorConversion implements SqlOperatorConversion
@ -64,13 +65,15 @@ public class LTrimOperatorConversion implements SqlOperatorConversion
return TrimOperatorConversion.makeTrimExpression(
SqlTrimFunction.Flag.LEADING,
druidExpressions.get(0),
druidExpressions.get(1)
druidExpressions.get(1),
Calcites.getColumnTypeForRelDataType(rexNode.getType())
);
} else {
return TrimOperatorConversion.makeTrimExpression(
SqlTrimFunction.Flag.LEADING,
druidExpressions.get(0),
DruidExpression.fromExpression(DruidExpression.stringLiteral(" "))
DruidExpression.ofStringLiteral(" "),
Calcites.getColumnTypeForRelDataType(rexNode.getType())
);
}
}

View File

@ -67,14 +67,11 @@ public class LeftOperatorConversion implements SqlOperatorConversion
if (call.getOperands().size() != 2) {
return null;
}
return OperatorConversions.convertCall(
return OperatorConversions.convertDirectCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("left", druidExpressions)
)
"left"
);
}
}

View File

@ -26,7 +26,6 @@ import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.LikeDimFilter;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.DruidExpression;
@ -79,14 +78,13 @@ public class LikeOperatorConversion extends DirectOperatorConversion
druidExpression.getSimpleExtraction().getExtractionFn()
);
} else if (virtualColumnRegistry != null) {
VirtualColumn v = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String v = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
druidExpression,
operands.get(0).getType()
);
return new LikeDimFilter(
v.getOutputName(),
v,
RexLiteral.stringValue(operands.get(1)),
operands.size() > 2 ? RexLiteral.stringValue(operands.get(2)) : null,
null

View File

@ -32,7 +32,6 @@ import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.math.expr.Expr;
import org.apache.druid.math.expr.InputBindings;
import org.apache.druid.math.expr.Parser;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.virtual.ListFilteredVirtualColumn;
@ -41,6 +40,7 @@ import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
@ -341,22 +341,27 @@ public class MultiValueStringOperatorConversions
return null;
}
final StringBuilder builder;
if (isAllowList()) {
builder = new StringBuilder("filter((x) -> array_contains(");
} else {
builder = new StringBuilder("filter((x) -> !array_contains(");
}
final DruidExpression.ExpressionBuilder builder = (args) -> {
final StringBuilder expressionBuilder;
if (isAllowList()) {
expressionBuilder = new StringBuilder("filter((x) -> array_contains(");
} else {
expressionBuilder = new StringBuilder("filter((x) -> !array_contains(");
}
builder.append(druidExpressions.get(1).getExpression())
.append(", x), ")
.append(druidExpressions.get(0).getExpression())
.append(")");
expressionBuilder.append(args.get(1).getExpression())
.append(", x), ")
.append(args.get(0).getExpression())
.append(")");
return expressionBuilder.toString();
};
if (druidExpressions.get(0).isSimpleExtraction()) {
DruidExpression druidExpression = DruidExpression.forVirtualColumn(
builder.toString(),
(name, outputType, macroTable) -> new ListFilteredVirtualColumn(
DruidExpression druidExpression = DruidExpression.ofVirtualColumn(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
builder,
druidExpressions,
(name, outputType, expression, macroTable) -> new ListFilteredVirtualColumn(
name,
druidExpressions.get(0).getSimpleExtraction().toDimensionSpec(druidExpressions.get(0).getDirectColumn(), outputType),
ImmutableSet.copyOf(lit),
@ -367,18 +372,17 @@ public class MultiValueStringOperatorConversions
// if the join expression VC registry is present, it means that this expression is part of a join condition
// and since that's the case, create virtual column here itself for optimized usage in join matching
if (plannerContext.getJoinExpressionVirtualColumnRegistry() != null) {
VirtualColumn vc = plannerContext.getJoinExpressionVirtualColumnRegistry().getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = plannerContext.getJoinExpressionVirtualColumnRegistry().getOrCreateVirtualColumnForExpression(
druidExpression,
ColumnType.STRING
);
return DruidExpression.fromColumn(vc.getOutputName());
return DruidExpression.ofColumn(ColumnType.STRING, virtualColumnName);
}
return druidExpression;
}
return DruidExpression.fromExpression(builder.toString());
return DruidExpression.ofExpression(ColumnType.STRING, builder, druidExpressions);
}
}

View File

@ -19,27 +19,20 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
/**
* Function that converts a String or a Multi Value direct column to an array.
* Input expressions are not supported as one should use the array function for such cases.
**/
public class MultiValueStringToArrayOperatorConversion implements SqlOperatorConversion
public class MultiValueStringToArrayOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("MV_TO_ARRAY")
@ -48,25 +41,8 @@ public class MultiValueStringToArrayOperatorConversion implements SqlOperatorCon
.returnTypeNullableArray(SqlTypeName.VARCHAR)
.build();
@Override
public SqlOperator calciteOperator()
public MultiValueStringToArrayOperatorConversion()
{
return SQL_FUNCTION;
super(SQL_FUNCTION, "mv_to_array");
}
@Nullable
@Override
public DruidExpression toDruidExpression(PlannerContext plannerContext, RowSignature rowSignature, RexNode rexNode)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("mv_to_array", druidExpressions)
)
);
}
}

View File

@ -56,6 +56,6 @@ public class ParseLongOperatorConversion implements SqlOperatorConversion
final RexNode rexNode
)
{
return OperatorConversions.convertCall(plannerContext, rowSignature, rexNode, "parse_long");
return OperatorConversions.convertDirectCall(plannerContext, rowSignature, rexNode, "parse_long");
}
}

View File

@ -24,6 +24,7 @@ import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
@ -32,7 +33,7 @@ import org.apache.druid.sql.calcite.planner.PlannerContext;
public class PositionOperatorConversion implements SqlOperatorConversion
{
private static final DruidExpression ZERO = DruidExpression.fromExpression("0");
private static final DruidExpression ZERO = DruidExpression.ofLiteral(ColumnType.LONG, "0");
@Override
public SqlOperator calciteOperator()
@ -51,26 +52,29 @@ public class PositionOperatorConversion implements SqlOperatorConversion
plannerContext,
rowSignature,
rexNode,
druidExpressions -> {
final DruidExpression fromIndexExpression;
if (druidExpressions.size() > 2) {
fromIndexExpression = DruidExpression.fromExpression(
StringUtils.format("(%s - 1)", druidExpressions.get(2).getExpression())
);
} else {
fromIndexExpression = ZERO;
}
return DruidExpression.fromExpression(
StringUtils.format(
druidExpressions -> DruidExpression.ofExpression(
ColumnType.LONG,
(args) -> {
final DruidExpression fromIndexExpression;
if (args.size() > 2) {
fromIndexExpression = DruidExpression.ofExpression(
ColumnType.LONG,
(_args) -> StringUtils.format("(%s - 1)", _args.get(2).getExpression()),
args
);
} else {
fromIndexExpression = ZERO;
}
return StringUtils.format(
"(%s + 1)",
DruidExpression.functionCall(
"strpos",
ImmutableList.of(druidExpressions.get(1), druidExpressions.get(0), fromIndexExpression)
ImmutableList.of(args.get(1), args.get(0), fromIndexExpression)
)
)
);
}
);
},
druidExpressions
)
);
}
}

View File

@ -65,7 +65,7 @@ public class QueryLookupOperatorConversion implements SqlOperatorConversion
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
return OperatorConversions.convertDirectCallWithExtraction(
plannerContext,
rowSignature,
rexNode,

View File

@ -30,6 +30,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class RPadOperatorConversion implements SqlOperatorConversion
@ -61,7 +62,8 @@ public class RPadOperatorConversion implements SqlOperatorConversion
rexNode,
druidExpressions -> {
if (druidExpressions.size() > 2) {
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"rpad",
ImmutableList.of(
druidExpressions.get(0),
@ -70,12 +72,13 @@ public class RPadOperatorConversion implements SqlOperatorConversion
)
);
} else {
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"rpad",
ImmutableList.of(
druidExpressions.get(0),
druidExpressions.get(1),
DruidExpression.fromExpression(DruidExpression.stringLiteral(" "))
DruidExpression.ofStringLiteral(" ")
)
);
}

View File

@ -30,6 +30,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class RTrimOperatorConversion implements SqlOperatorConversion
@ -64,13 +65,15 @@ public class RTrimOperatorConversion implements SqlOperatorConversion
return TrimOperatorConversion.makeTrimExpression(
SqlTrimFunction.Flag.TRAILING,
druidExpressions.get(0),
druidExpressions.get(1)
druidExpressions.get(1),
Calcites.getColumnTypeForRelDataType(rexNode.getType())
);
} else {
return TrimOperatorConversion.makeTrimExpression(
SqlTrimFunction.Flag.TRAILING,
druidExpressions.get(0),
DruidExpression.fromExpression(DruidExpression.stringLiteral(" "))
DruidExpression.ofStringLiteral(" "),
Calcites.getColumnTypeForRelDataType(rexNode.getType())
);
}
}

View File

@ -59,7 +59,7 @@ public class RegexpExtractOperatorConversion implements SqlOperatorConversion
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
return OperatorConversions.convertDirectCallWithExtraction(
plannerContext,
rowSignature,
rexNode,

View File

@ -28,7 +28,6 @@ import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.RegexDimFilter;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
@ -64,11 +63,11 @@ public class RegexpLikeOperatorConversion implements SqlOperatorConversion
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
return OperatorConversions.convertDirectCall(
plannerContext,
rowSignature,
rexNode,
operands -> DruidExpression.fromFunctionCall("regexp_like", operands)
"regexp_like"
);
}
@ -102,13 +101,12 @@ public class RegexpLikeOperatorConversion implements SqlOperatorConversion
null
);
} else if (virtualColumnRegistry != null) {
VirtualColumn v = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String v = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
druidExpression,
operands.get(0).getType()
);
return new RegexDimFilter(v.getOutputName(), pattern, null, null);
return new RegexDimFilter(v, pattern, null, null);
} else {
return null;
}

View File

@ -67,14 +67,11 @@ public class RepeatOperatorConversion implements SqlOperatorConversion
if (call.getOperands().size() != 2) {
return null;
}
return OperatorConversions.convertCall(
return OperatorConversions.convertDirectCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("repeat", druidExpressions)
)
"repeat"
);
}
}

View File

@ -19,19 +19,14 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class ReverseOperatorConversion implements SqlOperatorConversion
public class ReverseOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("REVERSE")
@ -40,27 +35,8 @@ public class ReverseOperatorConversion implements SqlOperatorConversion
.returnTypeCascadeNullable(SqlTypeName.VARCHAR)
.build();
@Override
public SqlOperator calciteOperator()
public ReverseOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("reverse", druidExpressions)
)
);
super(SQL_FUNCTION, "reverse");
}
}

View File

@ -67,14 +67,11 @@ public class RightOperatorConversion implements SqlOperatorConversion
if (call.getOperands().size() != 2) {
return null;
}
return OperatorConversions.convertCall(
return OperatorConversions.convertDirectCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("right", druidExpressions)
)
"right"
);
}
}

View File

@ -49,11 +49,11 @@ public class RoundOperatorConversion implements SqlOperatorConversion
@Override
public DruidExpression toDruidExpression(final PlannerContext plannerContext, final RowSignature rowSignature, final RexNode rexNode)
{
return OperatorConversions.convertCall(plannerContext, rowSignature, rexNode, inputExpressions -> {
return DruidExpression.fromFunctionCall(
"round",
inputExpressions
);
});
return OperatorConversions.convertDirectCall(
plannerContext,
rowSignature,
rexNode,
"round"
);
}
}

View File

@ -58,7 +58,7 @@ public class StringFormatOperatorConversion implements SqlOperatorConversion
final RexNode rexNode
)
{
return OperatorConversions.convertCall(plannerContext, rowSignature, rexNode, "format");
return OperatorConversions.convertDirectCall(plannerContext, rowSignature, rexNode, "format");
}
private static class StringFormatOperandTypeChecker implements SqlOperandTypeChecker

View File

@ -19,20 +19,15 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class StringToArrayOperatorConversion implements SqlOperatorConversion
public class StringToArrayOperatorConversion extends DirectOperatorConversion
{
// note: since this function produces an array
private static final SqlFunction SQL_FUNCTION = OperatorConversions
@ -48,27 +43,8 @@ public class StringToArrayOperatorConversion implements SqlOperatorConversion
.returnTypeNullableArray(SqlTypeName.VARCHAR)
.build();
@Override
public SqlOperator calciteOperator()
public StringToArrayOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("string_to_array", druidExpressions)
)
);
super(SQL_FUNCTION, "string_to_array");
}
}

View File

@ -30,6 +30,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class StrposOperatorConversion implements SqlOperatorConversion
@ -58,12 +59,13 @@ public class StrposOperatorConversion implements SqlOperatorConversion
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
StringUtils.format(
druidExpressions -> DruidExpression.ofExpression(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
(args) -> StringUtils.format(
"(%s + 1)",
DruidExpression.functionCall("strpos", druidExpressions)
)
DruidExpression.functionCall("strpos").buildExpression(args)
),
druidExpressions
)
);
}

View File

@ -19,18 +19,14 @@
package org.apache.druid.sql.calcite.expression.builtin;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.PlannerContext;
public class TextcatOperatorConversion implements SqlOperatorConversion
public class TextcatOperatorConversion extends DirectOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder("textcat")
@ -40,27 +36,8 @@ public class TextcatOperatorConversion implements SqlOperatorConversion
.functionCategory(SqlFunctionCategory.STRING)
.build();
@Override
public SqlFunction calciteOperator()
public TextcatOperatorConversion()
{
return SQL_FUNCTION;
}
@Override
public DruidExpression toDruidExpression(
final PlannerContext plannerContext,
final RowSignature rowSignature,
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.of(
null,
DruidExpression.functionCall("concat", druidExpressions)
)
);
super(SQL_FUNCTION, "concat");
}
}

View File

@ -20,6 +20,7 @@
package org.apache.druid.sql.calcite.expression.builtin;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexNode;
@ -30,10 +31,12 @@ import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import java.util.List;
@ -82,12 +85,15 @@ public abstract class TimeArithmeticOperatorConversion implements SqlOperatorCon
return null;
}
final ColumnType outputType = Calcites.getColumnTypeForRelDataType(rexNode.getType());
if (rightRexNode.getType().getFamily() == SqlTypeFamily.INTERVAL_YEAR_MONTH) {
// timestamp_expr { + | - } <interval_expr> (year-month interval)
// Period is a value in months.
return DruidExpression.fromExpression(
DruidExpression.functionCall(
"timestamp_shift",
return DruidExpression.ofExpression(
outputType,
DruidExpression.functionCall("timestamp_shift"),
ImmutableList.of(
leftExpr,
rightExpr.map(
simpleExtraction -> null,
@ -96,20 +102,22 @@ public abstract class TimeArithmeticOperatorConversion implements SqlOperatorCon
StringUtils.format("'P%sM'", RexLiteral.value(rightRexNode)) :
StringUtils.format("concat('P', %s, 'M')", expression)
),
DruidExpression.fromExpression(DruidExpression.numberLiteral(direction > 0 ? 1 : -1)),
DruidExpression.fromExpression(DruidExpression.stringLiteral(plannerContext.getTimeZone().getID()))
DruidExpression.ofLiteral(ColumnType.LONG, DruidExpression.numberLiteral(direction > 0 ? 1 : -1)),
DruidExpression.ofStringLiteral(plannerContext.getTimeZone().getID())
)
);
} else if (rightRexNode.getType().getFamily() == SqlTypeFamily.INTERVAL_DAY_TIME) {
// timestamp_expr { + | - } <interval_expr> (day-time interval)
// Period is a value in milliseconds. Ignore time zone.
return DruidExpression.fromExpression(
StringUtils.format(
return DruidExpression.ofExpression(
outputType,
(args) -> StringUtils.format(
"(%s %s %s)",
leftExpr.getExpression(),
args.get(0).getExpression(),
direction > 0 ? "+" : "-",
rightExpr.getExpression()
)
args.get(1).getExpression()
),
ImmutableList.of(leftExpr, rightExpr)
);
} else if ((leftRexNode.getType().getFamily() == SqlTypeFamily.TIMESTAMP ||
leftRexNode.getType().getFamily() == SqlTypeFamily.DATE) &&
@ -120,22 +128,25 @@ public abstract class TimeArithmeticOperatorConversion implements SqlOperatorCon
// the second argument.
Preconditions.checkState(direction < 0, "Time arithmetic require direction < 0");
if (call.getType().getFamily() == SqlTypeFamily.INTERVAL_YEAR_MONTH) {
return DruidExpression.fromExpression(
DruidExpression.functionCall(
"subtract_months",
return DruidExpression.ofExpression(
outputType,
DruidExpression.functionCall("subtract_months"),
ImmutableList.of(
leftExpr,
rightExpr,
DruidExpression.fromExpression(DruidExpression.stringLiteral(plannerContext.getTimeZone().getID()))
DruidExpression.ofStringLiteral(plannerContext.getTimeZone().getID())
)
);
} else {
return DruidExpression.fromExpression(
StringUtils.format(
"(%s %s %s)",
leftExpr.getExpression(),
"-",
rightExpr.getExpression()
)
return DruidExpression.ofExpression(
outputType,
(args) -> StringUtils.format(
"(%s %s %s)",
args.get(0).getExpression(),
"-",
args.get(1).getExpression()
),
ImmutableList.of(leftExpr, rightExpr)
);
}
} else {

View File

@ -30,6 +30,7 @@ import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
@ -70,6 +71,6 @@ public class TimeCeilOperatorConversion implements SqlOperatorConversion
return null;
}
return DruidExpression.fromFunctionCall("timestamp_ceil", functionArgs);
return DruidExpression.ofFunctionCall(Calcites.getColumnTypeForRelDataType(rexNode.getType()), "timestamp_ceil", functionArgs);
}
}

View File

@ -54,12 +54,13 @@ public class TimeExtractOperatorConversion implements SqlOperatorConversion
final DateTimeZone timeZone
)
{
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
timeExpression.getDruidType(),
"timestamp_extract",
ImmutableList.of(
timeExpression,
DruidExpression.fromExpression(DruidExpression.stringLiteral(unit.name())),
DruidExpression.fromExpression(DruidExpression.stringLiteral(timeZone.getID()))
DruidExpression.ofStringLiteral(unit.name()),
DruidExpression.ofStringLiteral(timeZone.getID())
)
);
}

View File

@ -34,6 +34,7 @@ import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.expression.TimestampFloorExprMacro;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
@ -48,7 +49,6 @@ import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class TimeFloorOperatorConversion implements SqlOperatorConversion
{
@ -92,16 +92,20 @@ public class TimeFloorOperatorConversion implements SqlOperatorConversion
}
}
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
input.getDruidType(),
"timestamp_floor",
ImmutableList.of(
input.getExpression(),
DruidExpression.stringLiteral(granularity.getPeriod().toString()),
DruidExpression.numberLiteral(
granularity.getOrigin() == null ? null : granularity.getOrigin().getMillis()
input,
DruidExpression.ofStringLiteral(granularity.getPeriod().toString()),
DruidExpression.ofLiteral(
ColumnType.LONG,
DruidExpression.numberLiteral(
granularity.getOrigin() == null ? null : granularity.getOrigin().getMillis()
)
),
DruidExpression.stringLiteral(granularity.getTimeZone().toString())
).stream().map(DruidExpression::fromExpression).collect(Collectors.toList())
DruidExpression.ofStringLiteral(granularity.getTimeZone().toString())
)
);
}
@ -134,7 +138,7 @@ public class TimeFloorOperatorConversion implements SqlOperatorConversion
return null;
}
functionArgs.add(DruidExpression.fromExpression(DruidExpression.stringLiteral(period.toString())));
functionArgs.add(DruidExpression.ofStringLiteral(period.toString()));
} else {
// Other literal types are used by TIME_FLOOR and TIME_CEIL
functionArgs.add(Expressions.toDruidExpression(plannerContext, rowSignature, periodOperand));
@ -147,7 +151,8 @@ public class TimeFloorOperatorConversion implements SqlOperatorConversion
2,
operand -> {
if (operand.isA(SqlKind.LITERAL)) {
return DruidExpression.fromExpression(
return DruidExpression.ofLiteral(
Calcites.getColumnTypeForRelDataType(operand.getType()),
DruidExpression.numberLiteral(
Calcites.calciteDateTimeLiteralToJoda(operand, plannerContext.getTimeZone()).getMillis()
)
@ -156,7 +161,7 @@ public class TimeFloorOperatorConversion implements SqlOperatorConversion
return Expressions.toDruidExpression(plannerContext, rowSignature, operand);
}
},
DruidExpression.fromExpression(DruidExpression.nullLiteral())
DruidExpression.ofLiteral(null, DruidExpression.nullLiteral())
)
);
@ -166,7 +171,7 @@ public class TimeFloorOperatorConversion implements SqlOperatorConversion
operands,
3,
operand -> Expressions.toDruidExpression(plannerContext, rowSignature, operand),
DruidExpression.fromExpression(DruidExpression.stringLiteral(plannerContext.getTimeZone().getID()))
DruidExpression.ofStringLiteral(plannerContext.getTimeZone().getID())
)
);
@ -207,6 +212,10 @@ public class TimeFloorOperatorConversion implements SqlOperatorConversion
return null;
}
return DruidExpression.fromFunctionCall("timestamp_floor", functionArgs);
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"timestamp_floor",
functionArgs
);
}
}

View File

@ -35,11 +35,10 @@ import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.joda.time.DateTimeZone;
import java.util.stream.Collectors;
public class TimeFormatOperatorConversion implements SqlOperatorConversion
{
private static final String DEFAULT_PATTERN = "yyyy-MM-dd'T'HH:mm:ss.SSSZZ";
@ -93,13 +92,14 @@ public class TimeFormatOperatorConversion implements SqlOperatorConversion
plannerContext.getTimeZone()
);
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"timestamp_format",
ImmutableList.of(
timeExpression.getExpression(),
DruidExpression.stringLiteral(pattern),
DruidExpression.stringLiteral(timeZone.getID())
).stream().map(DruidExpression::fromExpression).collect(Collectors.toList())
timeExpression,
DruidExpression.ofStringLiteral(pattern),
DruidExpression.ofStringLiteral(timeZone.getID())
)
);
}
}

View File

@ -34,11 +34,10 @@ import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.joda.time.DateTimeZone;
import java.util.stream.Collectors;
public class TimeParseOperatorConversion implements SqlOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
@ -83,13 +82,14 @@ public class TimeParseOperatorConversion implements SqlOperatorConversion
plannerContext.getTimeZone()
);
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"timestamp_parse",
ImmutableList.of(
timeExpression.getExpression(),
DruidExpression.stringLiteral(pattern),
DruidExpression.stringLiteral(timeZone.getID())
).stream().map(DruidExpression::fromExpression).collect(Collectors.toList())
timeExpression,
DruidExpression.ofStringLiteral(pattern),
DruidExpression.ofStringLiteral(timeZone.getID())
)
);
}
}

View File

@ -34,11 +34,10 @@ import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.joda.time.DateTimeZone;
import java.util.stream.Collectors;
public class TimeShiftOperatorConversion implements SqlOperatorConversion
{
private static final SqlFunction SQL_FUNCTION = OperatorConversions
@ -92,14 +91,15 @@ public class TimeShiftOperatorConversion implements SqlOperatorConversion
plannerContext.getTimeZone()
);
return DruidExpression.fromFunctionCall(
return DruidExpression.ofFunctionCall(
Calcites.getColumnTypeForRelDataType(rexNode.getType()),
"timestamp_shift",
ImmutableList.of(
timeExpression.getExpression(),
periodExpression.getExpression(),
stepExpression.getExpression(),
DruidExpression.stringLiteral(timeZone.getID())
).stream().map(DruidExpression::fromExpression).collect(Collectors.toList())
timeExpression,
periodExpression,
stepExpression,
DruidExpression.ofStringLiteral(timeZone.getID())
)
);
}
}

View File

@ -26,10 +26,12 @@ import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.fun.SqlTrimFunction;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
@ -40,7 +42,8 @@ public class TrimOperatorConversion implements SqlOperatorConversion
public static DruidExpression makeTrimExpression(
final SqlTrimFunction.Flag trimStyle,
final DruidExpression stringExpression,
final DruidExpression charsExpression
final DruidExpression charsExpression,
final ColumnType druidType
)
{
final String functionName;
@ -61,7 +64,7 @@ public class TrimOperatorConversion implements SqlOperatorConversion
}
// Druid version of trim is multi-function (ltrim/rtrim/trim) and the other two args are swapped.
return DruidExpression.fromFunctionCall(functionName, ImmutableList.of(stringExpression, charsExpression));
return DruidExpression.ofFunctionCall(druidType, functionName, ImmutableList.of(stringExpression, charsExpression));
}
@Override
@ -99,6 +102,11 @@ public class TrimOperatorConversion implements SqlOperatorConversion
return null;
}
return makeTrimExpression(trimStyle, stringExpression, charsExpression);
return makeTrimExpression(
trimStyle,
stringExpression,
charsExpression,
Calcites.getColumnTypeForRelDataType(rexNode.getType())
);
}
}

View File

@ -55,7 +55,7 @@ public class TruncateOperatorConversion implements SqlOperatorConversion
final RexNode rexNode
)
{
return OperatorConversions.convertCall(
return OperatorConversions.convertCallBuilder(
plannerContext,
rowSignature,
rexNode,
@ -77,13 +77,11 @@ public class TruncateOperatorConversion implements SqlOperatorConversion
factorString = StringUtils.format("pow(10,%s)", inputExpressions.get(1));
}
return DruidExpression.fromExpression(
StringUtils.format(
"(cast(cast(%s * %s,'long'),'double') / %s)",
arg.getExpression(),
factorString,
factorString
)
return StringUtils.format(
"(cast(cast(%s * %s,'long'),'double') / %s)",
arg.getExpression(),
factorString,
factorString
);
}
);

View File

@ -163,7 +163,7 @@ public class DruidJoinQueryRel extends DruidRel<DruidJoinQueryRel>
final Pair<String, RowSignature> prefixSignaturePair = computeJoinRowSignature(leftSignature, rightSignature);
VirtualColumnRegistry virtualColumnRegistry = VirtualColumnRegistry.create(prefixSignaturePair.rhs);
VirtualColumnRegistry virtualColumnRegistry = VirtualColumnRegistry.create(prefixSignaturePair.rhs, getPlannerContext().getExprMacroTable());
getPlannerContext().setJoinExpressionVirtualColumnRegistry(virtualColumnRegistry);
// Generate the condition for this join as a Druid expression.

View File

@ -162,7 +162,7 @@ public class DruidQuery
{
final RelDataType outputRowType = partialQuery.leafRel().getRowType();
if (virtualColumnRegistry == null) {
virtualColumnRegistry = VirtualColumnRegistry.create(sourceRowSignature);
virtualColumnRegistry = VirtualColumnRegistry.create(sourceRowSignature, plannerContext.getExprMacroTable());
}
// Now the fun begins.
@ -412,17 +412,14 @@ public class DruidQuery
throw new CannotBuildQueryException(aggregate, rexNode);
}
final VirtualColumn virtualColumn;
final String dimOutputName = outputNamePrefix + outputNameCounter++;
if (!druidExpression.isSimpleExtraction()) {
virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
final String virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
druidExpression,
dataType
);
dimensions.add(DimensionExpression.ofVirtualColumn(
virtualColumn.getOutputName(),
virtualColumn,
dimOutputName,
druidExpression,
outputType
@ -627,6 +624,29 @@ public class DruidQuery
// the various transforms and optimizations
Set<VirtualColumn> virtualColumns = new HashSet<>();
// rewrite any "specialized" virtual column expressions as top level virtual columns so that their native
// implementation can be used instead of being composed as part of some expression tree in an expresson virtual
// column
Set<String> specialized = new HashSet<>();
virtualColumnRegistry.visitAllSubExpressions((expression) -> {
switch (expression.getType()) {
case SPECIALIZED:
// add the expression to the top level of the registry as a standalone virtual column
final String name = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
expression,
expression.getDruidType()
);
specialized.add(name);
// replace with an identifier expression of the new virtual column name
return DruidExpression.ofColumn(expression.getDruidType(), name);
default:
// do nothing
return expression;
}
});
// we always want to add any virtual columns used by the query level DimFilter
if (filter != null) {
for (String columnName : filter.getRequiredColumns()) {
@ -637,7 +657,11 @@ public class DruidQuery
}
if (selectProjection != null) {
virtualColumns.addAll(selectProjection.getVirtualColumns());
for (String columnName : selectProjection.getVirtualColumns()) {
if (virtualColumnRegistry.isVirtualColumnDefined(columnName)) {
virtualColumns.add(virtualColumnRegistry.getVirtualColumn(columnName));
}
}
}
if (grouping != null) {
@ -650,13 +674,18 @@ public class DruidQuery
}
for (Aggregation aggregation : grouping.getAggregations()) {
virtualColumns.addAll(virtualColumnRegistry.findVirtualColumns(aggregation.getRequiredColumns()));
virtualColumns.addAll(virtualColumnRegistry.getAllVirtualColumns(aggregation.getRequiredColumns()));
}
}
if (sorting != null && sorting.getProjection() != null && grouping == null) {
// Sorting without grouping means we might have some post-sort Projection virtual columns.
virtualColumns.addAll(sorting.getProjection().getVirtualColumns());
for (String columnName : sorting.getProjection().getVirtualColumns()) {
if (virtualColumnRegistry.isVirtualColumnDefined(columnName)) {
virtualColumns.add(virtualColumnRegistry.getVirtualColumn(columnName));
}
}
}
if (dataSource instanceof JoinDataSource) {
@ -667,6 +696,12 @@ public class DruidQuery
}
}
for (String columnName : specialized) {
if (virtualColumnRegistry.isVirtualColumnDefined(columnName)) {
virtualColumns.add(virtualColumnRegistry.getVirtualColumn(columnName));
}
}
// sort for predictable output
List<VirtualColumn> columns = new ArrayList<>(virtualColumns);
columns.sort(Comparator.comparing(VirtualColumn::getOutputName));

View File

@ -31,7 +31,6 @@ import org.apache.druid.math.expr.ExpressionType;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.aggregation.post.ExpressionPostAggregator;
import org.apache.druid.query.aggregation.post.FieldAccessPostAggregator;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
@ -65,13 +64,13 @@ public class Projection
private final List<PostAggregator> postAggregators;
@Nullable
private final List<VirtualColumn> virtualColumns;
private final List<String> virtualColumns;
private final RowSignature outputRowSignature;
private Projection(
@Nullable final List<PostAggregator> postAggregators,
@Nullable final List<VirtualColumn> virtualColumns,
@Nullable final List<String> virtualColumns,
final RowSignature outputRowSignature
)
{
@ -255,7 +254,7 @@ public class Projection
}
}
final Set<VirtualColumn> virtualColumns = new HashSet<>();
final Set<String> virtualColumns = new HashSet<>();
final List<String> rowOrder = new ArrayList<>();
for (int i = 0; i < expressions.size(); i++) {
@ -271,13 +270,12 @@ public class Projection
// Refer to column directly when it's a direct access with matching type.
rowOrder.add(expression.getDirectColumn());
} else {
final VirtualColumn virtualColumn = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
plannerContext,
String virtualColumnName = virtualColumnRegistry.getOrCreateVirtualColumnForExpression(
expression,
project.getChildExps().get(i).getType()
);
virtualColumns.add(virtualColumn);
rowOrder.add(virtualColumn.getOutputName());
virtualColumns.add(virtualColumnName);
rowOrder.add(virtualColumnName);
}
}
@ -367,7 +365,7 @@ public class Projection
return Preconditions.checkNotNull(postAggregators, "postAggregators");
}
public List<VirtualColumn> getVirtualColumns()
public List<String> getVirtualColumns()
{
// If you ever see this error, it probably means a Projection was created in post-aggregation mode, but then
// used in a pre-aggregation context. This is likely a bug somewhere in DruidQuery. See class-level Javadocs.

View File

@ -20,19 +20,21 @@
package org.apache.druid.sql.calcite.rel;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
/**
@ -41,29 +43,33 @@ import java.util.stream.Collectors;
*/
public class VirtualColumnRegistry
{
private final ExprMacroTable macroTable;
private final RowSignature baseRowSignature;
private final Map<ExpressionWrapper, VirtualColumn> virtualColumnsByExpression;
private final Map<String, VirtualColumn> virtualColumnsByName;
private final Map<ExpressionAndTypeHint, String> virtualColumnsByExpression;
private final Map<String, ExpressionAndTypeHint> virtualColumnsByName;
private final String virtualColumnPrefix;
private int virtualColumnCounter;
private VirtualColumnRegistry(
RowSignature baseRowSignature,
ExprMacroTable macroTable,
String virtualColumnPrefix,
Map<ExpressionWrapper, VirtualColumn> virtualColumnsByExpression,
Map<String, VirtualColumn> virtualColumnsByName
Map<ExpressionAndTypeHint, String> virtualColumnsByExpression,
Map<String, ExpressionAndTypeHint> virtualColumnsByName
)
{
this.macroTable = macroTable;
this.baseRowSignature = baseRowSignature;
this.virtualColumnPrefix = virtualColumnPrefix;
this.virtualColumnsByExpression = virtualColumnsByExpression;
this.virtualColumnsByName = virtualColumnsByName;
}
public static VirtualColumnRegistry create(final RowSignature rowSignature)
public static VirtualColumnRegistry create(final RowSignature rowSignature, final ExprMacroTable macroTable)
{
return new VirtualColumnRegistry(
rowSignature,
macroTable,
Calcites.findUnusedPrefixForDigits("v", rowSignature.getColumnNames()),
new HashMap<>(),
new HashMap<>()
@ -79,38 +85,151 @@ public class VirtualColumnRegistry
}
/**
* Get existing or create new {@link VirtualColumn} for a given {@link DruidExpression} and {@link ValueType}.
* Get existing or create new {@link VirtualColumn} for a given {@link DruidExpression} and hinted {@link ColumnType}.
*/
public String getOrCreateVirtualColumnForExpression(
DruidExpression expression,
ColumnType typeHint
)
{
final ExpressionAndTypeHint candidate = wrap(expression, typeHint);
if (!virtualColumnsByExpression.containsKey(candidate)) {
final String virtualColumnName = virtualColumnPrefix + virtualColumnCounter++;
virtualColumnsByExpression.put(
candidate,
virtualColumnName
);
virtualColumnsByName.put(
virtualColumnName,
candidate
);
}
return virtualColumnsByExpression.get(candidate);
}
/**
* Get existing or create new {@link VirtualColumn} for a given {@link DruidExpression} and {@link RelDataType}
*/
public String getOrCreateVirtualColumnForExpression(
DruidExpression expression,
RelDataType typeHint
)
{
return getOrCreateVirtualColumnForExpression(
expression,
Calcites.getColumnTypeForRelDataType(typeHint)
);
}
/**
* Get existing virtual column by column name
*/
@Nullable
public VirtualColumn getVirtualColumn(String virtualColumnName)
{
return Optional.ofNullable(virtualColumnsByName.get(virtualColumnName))
.map(v -> v.getExpression().toVirtualColumn(virtualColumnName, v.getTypeHint(), macroTable))
.orElse(null);
}
@Nullable
public String getVirtualColumnByExpression(DruidExpression expression, RelDataType typeHint)
{
return virtualColumnsByExpression.get(wrap(expression, Calcites.getColumnTypeForRelDataType(typeHint)));
}
/**
* Get a signature representing the base signature plus all registered virtual columns.
*/
public RowSignature getFullRowSignature()
{
final RowSignature.Builder builder =
RowSignature.builder().addAll(baseRowSignature);
final RowSignature baseSignature = builder.build();
for (Map.Entry<String, ExpressionAndTypeHint> virtualColumn : virtualColumnsByName.entrySet()) {
final String columnName = virtualColumn.getKey();
// this is expensive, maybe someday it could use the typeHint, or the inferred type, but for now use native
// expression type inference
builder.add(
columnName,
virtualColumn.getValue().getExpression().toVirtualColumn(
columnName,
virtualColumn.getValue().getTypeHint(),
macroTable
).capabilities(baseSignature, columnName).toColumnType()
);
}
return builder.build();
}
/**
* Given a list of column names, find any corresponding {@link VirtualColumn} with the same name
*/
public List<DruidExpression> findVirtualColumnExpressions(List<String> allColumns)
{
return allColumns.stream()
.filter(this::isVirtualColumnDefined)
.map(name -> virtualColumnsByName.get(name).getExpression())
.collect(Collectors.toList());
}
public void visitAllSubExpressions(DruidExpression.DruidExpressionShuttle shuttle)
{
for (Map.Entry<String, ExpressionAndTypeHint> entry : virtualColumnsByName.entrySet()) {
final String key = entry.getKey();
final ExpressionAndTypeHint wrapped = entry.getValue();
virtualColumnsByExpression.remove(wrapped);
final List<DruidExpression> newArgs = shuttle.visitAll(wrapped.getExpression().getArguments());
final ExpressionAndTypeHint newWrapped = wrap(wrapped.getExpression().withArguments(newArgs), wrapped.getTypeHint());
virtualColumnsByName.put(key, newWrapped);
virtualColumnsByExpression.put(newWrapped, key);
}
}
public Collection<? extends VirtualColumn> getAllVirtualColumns(List<String> requiredColumns)
{
return requiredColumns.stream()
.filter(this::isVirtualColumnDefined)
.map(this::getVirtualColumn)
.collect(Collectors.toList());
}
/**
* @deprecated use {@link #findVirtualColumnExpressions(List)} instead
*/
@Deprecated
public List<VirtualColumn> findVirtualColumns(List<String> allColumns)
{
return allColumns.stream()
.filter(this::isVirtualColumnDefined)
.map(this::getVirtualColumn)
.collect(Collectors.toList());
}
/**
* @deprecated use {@link #getOrCreateVirtualColumnForExpression(DruidExpression, ColumnType)} instead
*/
@Deprecated
public VirtualColumn getOrCreateVirtualColumnForExpression(
PlannerContext plannerContext,
DruidExpression expression,
ColumnType valueType
)
{
ExpressionWrapper expressionWrapper = new ExpressionWrapper(expression.getExpression(), valueType);
if (!virtualColumnsByExpression.containsKey(expressionWrapper)) {
final String virtualColumnName = virtualColumnPrefix + virtualColumnCounter++;
final VirtualColumn virtualColumn = expression.toVirtualColumn(
virtualColumnName,
valueType,
plannerContext.getExprMacroTable()
);
virtualColumnsByExpression.put(
expressionWrapper,
virtualColumn
);
virtualColumnsByName.put(
virtualColumnName,
virtualColumn
);
}
return virtualColumnsByExpression.get(expressionWrapper);
final String name = getOrCreateVirtualColumnForExpression(expression, valueType);
return virtualColumnsByName.get(name).expression.toVirtualColumn(name, valueType, macroTable);
}
/**
* Get existing or create new {@link VirtualColumn} for a given {@link DruidExpression} and {@link RelDataType}
* @deprecated use {@link #getOrCreateVirtualColumnForExpression(DruidExpression, RelDataType)} instead
*/
@Deprecated
public VirtualColumn getOrCreateVirtualColumnForExpression(
PlannerContext plannerContext,
DruidExpression expression,
@ -125,59 +244,43 @@ public class VirtualColumnRegistry
}
/**
* Get existing virtual column by column name
* @deprecated use {@link #getVirtualColumnByExpression(DruidExpression, RelDataType)} instead
*/
@Nullable
public VirtualColumn getVirtualColumn(String virtualColumnName)
{
return virtualColumnsByName.get(virtualColumnName);
}
@Deprecated
@Nullable
public VirtualColumn getVirtualColumnByExpression(String expression, RelDataType type)
{
ExpressionWrapper expressionWrapper = new ExpressionWrapper(expression, Calcites.getColumnTypeForRelDataType(type));
return virtualColumnsByExpression.get(expressionWrapper);
final ColumnType columnType = Calcites.getColumnTypeForRelDataType(type);
ExpressionAndTypeHint wrapped = wrap(DruidExpression.fromExpression(expression), columnType);
return Optional.ofNullable(virtualColumnsByExpression.get(wrapped))
.map(this::getVirtualColumn)
.orElse(null);
}
/**
* Get a signature representing the base signature plus all registered virtual columns.
*/
public RowSignature getFullRowSignature()
private static ExpressionAndTypeHint wrap(DruidExpression expression, ColumnType typeHint)
{
final RowSignature.Builder builder =
RowSignature.builder().addAll(baseRowSignature);
RowSignature baseSignature = builder.build();
for (VirtualColumn virtualColumn : virtualColumnsByName.values()) {
final String columnName = virtualColumn.getOutputName();
builder.add(columnName, virtualColumn.capabilities(baseSignature, columnName).toColumnType());
}
return builder.build();
return new ExpressionAndTypeHint(expression, typeHint);
}
/**
* Given a list of column names, find any corresponding {@link VirtualColumn} with the same name
*/
public List<VirtualColumn> findVirtualColumns(List<String> allColumns)
private static class ExpressionAndTypeHint
{
return allColumns.stream()
.filter(this::isVirtualColumnDefined)
.map(this::getVirtualColumn)
.collect(Collectors.toList());
}
private final DruidExpression expression;
private final ColumnType typeHint;
private static class ExpressionWrapper
{
private final String expression;
private final ColumnType valueType;
public ExpressionWrapper(String expression, ColumnType valueType)
public ExpressionAndTypeHint(DruidExpression expression, ColumnType valueType)
{
this.expression = expression;
this.valueType = valueType;
this.typeHint = valueType;
}
public DruidExpression getExpression()
{
return expression;
}
public ColumnType getTypeHint()
{
return typeHint;
}
@Override
@ -189,14 +292,15 @@ public class VirtualColumnRegistry
if (o == null || getClass() != o.getClass()) {
return false;
}
ExpressionWrapper expressionWrapper = (ExpressionWrapper) o;
return Objects.equals(expression, expressionWrapper.expression) && Objects.equals(valueType, expressionWrapper.valueType);
ExpressionAndTypeHint expressionAndTypeHint = (ExpressionAndTypeHint) o;
return Objects.equals(typeHint, expressionAndTypeHint.typeHint) &&
Objects.equals(expression, expressionAndTypeHint.expression);
}
@Override
public int hashCode()
{
return Objects.hash(expression, valueType);
return Objects.hash(expression, typeHint);
}
}
}

View File

@ -513,8 +513,7 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
new Object[]{"[\"a\",\"b\"]"},
new Object[]{useDefault ? "" : null}
new Object[]{"[\"a\",\"b\"]"}
)
);
}
@ -654,10 +653,10 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
ImmutableList.of(
new Object[]{"", 2, 1L},
new Object[]{"10.1", 2, 1L},
new Object[]{"1", 1, 1L},
new Object[]{"2", 1, 1L},
new Object[]{"abc", 1, 1L},
new Object[]{"def", 1, 1L}
useDefault ? new Object[]{"2", 1, 1L} : new Object[]{"1", 1, 1L},
useDefault ? new Object[]{"1", 0, 1L} : new Object[]{"2", 1, 1L},
new Object[]{"abc", useDefault ? 0 : null, 1L},
new Object[]{"def", useDefault ? 0 : null, 1L}
)
);
}
@ -785,14 +784,14 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
ImmutableList<Object[]> results;
if (useDefault) {
results = ImmutableList.of(
new Object[]{"foo,null", "null,foo", 3L},
new Object[]{"", "", 3L},
new Object[]{"foo,a,b", "a,b,foo", 1L},
new Object[]{"foo,b,c", "b,c,foo", 1L},
new Object[]{"foo,d", "d,foo", 1L}
);
} else {
results = ImmutableList.of(
new Object[]{"foo,null", "null,foo", 2L},
new Object[]{null, null, 2L},
new Object[]{"foo,", ",foo", 1L},
new Object[]{"foo,a,b", "a,b,foo", 1L},
new Object[]{"foo,b,c", "b,c,foo", 1L},
@ -1205,8 +1204,14 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
.setContext(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
new Object[]{useDefault ? -1 : null, 4L},
useDefault
? ImmutableList.of(
new Object[]{0, 4L},
new Object[]{-1, 1L},
new Object[]{1, 1L}
)
: ImmutableList.of(
new Object[]{null, 4L},
new Object[]{0, 1L},
new Object[]{1, 1L}
)
@ -1248,8 +1253,15 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
.setContext(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
new Object[]{useDefault ? -1 : null, 4L},
useDefault
? ImmutableList.of(
new Object[]{0, 3L},
new Object[]{-1, 1L},
new Object[]{1, 1L},
new Object[]{2, 1L}
)
: ImmutableList.of(
new Object[]{null, 4L},
new Object[]{1, 1L},
new Object[]{2, 1L}
)
@ -1321,14 +1333,12 @@ public class CalciteArraysQueryTest extends BaseCalciteQueryTest
ImmutableList<Object[]> results;
if (useDefault) {
results = ImmutableList.of(
new Object[]{ImmutableList.of("", "d"), 3L},
new Object[]{ImmutableList.of("a", "b", "d"), 1L},
new Object[]{ImmutableList.of("b", "c", "d"), 1L},
new Object[]{ImmutableList.of("d", "d"), 1L}
);
} else {
results = ImmutableList.of(
new Object[]{null, 2L},
new Object[]{ImmutableList.of("", "d"), 1L},
new Object[]{ImmutableList.of("a", "b", "d"), 1L},
new Object[]{ImmutableList.of("b", "c", "d"), 1L},

View File

@ -43,7 +43,6 @@ import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.join.JoinType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -153,8 +152,8 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("country"),
DruidExpression.fromColumn("j0._d0")
makeColumnExpression("country"),
makeColumnExpression("j0._d0")
),
JoinType.LEFT
)
@ -242,8 +241,8 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("country"),
DruidExpression.fromColumn("j0._d0")
makeColumnExpression("country"),
makeColumnExpression("j0._d0")
),
JoinType.LEFT,
selector("city", "B", null)
@ -335,8 +334,8 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("country"),
DruidExpression.fromColumn("j0._d0")
makeColumnExpression("country"),
makeColumnExpression("j0._d0")
),
JoinType.LEFT
)
@ -428,8 +427,8 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("country"),
DruidExpression.fromColumn("j0._d0")
makeColumnExpression("country"),
makeColumnExpression("j0._d0")
),
JoinType.LEFT,
selector("city", "B", null)
@ -521,8 +520,8 @@ public class CalciteCorrelatedQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("country"),
DruidExpression.fromColumn("j0._d0")
makeColumnExpression("country"),
makeColumnExpression("j0._d0")
),
JoinType.LEFT,
selector("city", "B", null)

View File

@ -177,8 +177,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("m1"),
DruidExpression.fromColumn("j0.m1")
DruidExpression.ofColumn(ColumnType.FLOAT, "m1"),
DruidExpression.ofColumn(ColumnType.FLOAT, "j0.m1")
),
JoinType.INNER
)
@ -266,8 +266,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("m1"),
DruidExpression.fromColumn("j0.m1")
DruidExpression.ofColumn(ColumnType.FLOAT, "m1"),
DruidExpression.ofColumn(ColumnType.FLOAT, "j0.m1")
),
JoinType.INNER
)
@ -353,8 +353,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("m1"),
DruidExpression.fromColumn("j0.m1")
DruidExpression.ofColumn(ColumnType.FLOAT, "m1"),
DruidExpression.ofColumn(ColumnType.FLOAT, "j0.m1")
),
JoinType.INNER
)
@ -549,7 +549,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.LEFT
)
)
@ -585,7 +585,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.LEFT
)
)
@ -633,7 +633,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("k"), DruidExpression.fromColumn("j0.dim2")),
equalsCondition(makeColumnExpression("k"), makeColumnExpression("j0.dim2")),
JoinType.RIGHT
)
)
@ -673,7 +673,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.LEFT
)
)
@ -719,7 +719,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.LEFT
)
)
@ -758,7 +758,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.LEFT
)
)
@ -805,7 +805,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("d0"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("d0"), makeColumnExpression("j0.k")),
JoinType.LEFT
)
)
@ -846,7 +846,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.INNER
)
)
@ -878,7 +878,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.INNER
)
)
@ -913,12 +913,12 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.LEFT
),
new LookupDataSource("lookyloo"),
"_j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("_j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("_j0.k")),
JoinType.LEFT
)
)
@ -960,12 +960,12 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"_j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("_j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("_j0.k")),
JoinType.INNER
)
)
@ -1002,12 +1002,12 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"_j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("_j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("_j0.k")),
JoinType.INNER
)
)
@ -1045,12 +1045,12 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"_j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("_j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("_j0.k")),
JoinType.INNER
)
)
@ -1088,12 +1088,12 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"_j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("_j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("_j0.k")),
JoinType.INNER
)
)
@ -1166,9 +1166,9 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
"lookyloo"),
"j0.",
equalsCondition(
DruidExpression.fromColumn(
makeColumnExpression(
"dim2"),
DruidExpression.fromColumn(
makeColumnExpression(
"j0.k")
),
JoinType.INNER
@ -1177,9 +1177,9 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
"lookyloo"),
"_j0.",
equalsCondition(
DruidExpression.fromColumn(
makeColumnExpression(
"dim2"),
DruidExpression.fromColumn(
makeColumnExpression(
"_j0.k")
),
JoinType.INNER
@ -1187,9 +1187,9 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new LookupDataSource("lookyloo"),
"__j0.",
equalsCondition(
DruidExpression.fromColumn(
makeColumnExpression(
"dim2"),
DruidExpression.fromColumn(
makeColumnExpression(
"__j0.k")
),
JoinType.INNER
@ -1197,9 +1197,9 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new LookupDataSource("lookyloo"),
"___j0.",
equalsCondition(
DruidExpression.fromColumn(
makeColumnExpression(
"dim2"),
DruidExpression.fromColumn(
makeColumnExpression(
"___j0.k")
),
JoinType.INNER
@ -1207,8 +1207,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new LookupDataSource("lookyloo"),
"____j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn(
makeColumnExpression("dim2"),
makeColumnExpression(
"____j0.k")
),
JoinType.INNER
@ -1216,112 +1216,112 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new LookupDataSource("lookyloo"),
"_____j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("_____j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("_____j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"______j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("______j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("______j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"_______j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("_______j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("_______j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("________j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"_________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("_________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("_________j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"__________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("__________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("__________j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"___________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("___________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("___________j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"____________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("____________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("____________j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"_____________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("_____________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("_____________j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"______________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("______________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("______________j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"_______________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("_______________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("_______________j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"________________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("________________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("________________j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"_________________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("_________________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("_________________j0.k")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"__________________j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("__________________j0.k")
makeColumnExpression("dim2"),
makeColumnExpression("__________________j0.k")
),
JoinType.INNER
)
@ -1375,7 +1375,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.d0")),
JoinType.INNER
)
)
@ -1411,7 +1411,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.INNER
)
)
@ -1467,14 +1467,14 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("m1"),
DruidExpression.fromColumn("j0.v0")
DruidExpression.ofColumn(ColumnType.FLOAT, "m1"),
DruidExpression.ofColumn(ColumnType.FLOAT, "j0.v0")
),
JoinType.INNER
),
new LookupDataSource("lookyloo"),
"_j0.",
equalsCondition(DruidExpression.fromColumn("j0.k"), DruidExpression.fromColumn("_j0.k")),
equalsCondition(makeColumnExpression("j0.k"), makeColumnExpression("_j0.k")),
JoinType.INNER
)
)
@ -1514,8 +1514,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(
DruidExpression.fromColumn("k"),
DruidExpression.fromColumn("j0.k")
makeColumnExpression("k"),
makeColumnExpression("j0.k")
),
JoinType.INNER
),
@ -1529,8 +1529,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"_j0.",
equalsCondition(
DruidExpression.fromExpression("CAST(\"j0.k\", 'DOUBLE')"),
DruidExpression.fromColumn("_j0.m1")
makeExpression(ColumnType.DOUBLE, "CAST(\"j0.k\", 'DOUBLE')"),
DruidExpression.ofColumn(ColumnType.DOUBLE, "_j0.m1")
),
JoinType.INNER
)
@ -1579,8 +1579,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("k"),
DruidExpression.fromColumn("j0.dim1")
makeColumnExpression("k"),
makeColumnExpression("j0.dim1")
),
JoinType.INNER
),
@ -1594,8 +1594,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"_j0.",
equalsCondition(
DruidExpression.fromColumn("k"),
DruidExpression.fromColumn("_j0.dim1")
makeColumnExpression("k"),
makeColumnExpression("_j0.dim1")
),
JoinType.INNER
)
@ -1659,8 +1659,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("k"),
DruidExpression.fromColumn("j0.dim1")
makeColumnExpression("k"),
makeColumnExpression("j0.dim1")
),
JoinType.INNER
),
@ -1674,8 +1674,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"_j0.",
equalsCondition(
DruidExpression.fromColumn("j0.dim1"),
DruidExpression.fromColumn("_j0.dim1")
makeColumnExpression("j0.dim1"),
makeColumnExpression("_j0.dim1")
),
JoinType.INNER
)
@ -1736,7 +1736,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.d0")),
JoinType.INNER
)
)
@ -1767,8 +1767,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(
DruidExpression.fromExpression("substring(\"dim2\", 0, 1)"),
DruidExpression.fromColumn("j0.k")
makeExpression("substring(\"dim2\", 0, 1)"),
makeColumnExpression("j0.k")
),
JoinType.INNER
)
@ -1877,8 +1877,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("cnt"),
DruidExpression.fromColumn("j0.v0")
DruidExpression.ofColumn(ColumnType.LONG, "cnt"),
DruidExpression.ofColumn(ColumnType.LONG, "j0.v0")
),
JoinType.INNER
),
@ -1893,8 +1893,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"_j0.",
equalsCondition(
DruidExpression.fromExpression("CAST(\"j0.k\", 'LONG')"),
DruidExpression.fromColumn("_j0.cnt")
makeExpression(ColumnType.LONG, "CAST(\"j0.k\", 'LONG')"),
DruidExpression.ofColumn(ColumnType.LONG, "_j0.cnt")
),
JoinType.INNER
)
@ -1930,8 +1930,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(
DruidExpression.fromExpression("CAST(\"m1\", 'STRING')"),
DruidExpression.fromColumn("j0.k")
makeExpression("CAST(\"m1\", 'STRING')"),
makeColumnExpression("j0.k")
),
JoinType.INNER
)
@ -1974,7 +1974,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("m1"), DruidExpression.fromColumn("j0.v0")),
equalsCondition(DruidExpression.ofColumn(ColumnType.FLOAT, "m1"), DruidExpression.ofColumn(ColumnType.FLOAT, "j0.v0")),
JoinType.INNER
)
)
@ -2018,7 +2018,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("m1"), DruidExpression.fromColumn("j0.v0")),
equalsCondition(DruidExpression.ofColumn(ColumnType.FLOAT, "m1"), DruidExpression.ofColumn(ColumnType.FLOAT, "j0.v0")),
JoinType.INNER
)
)
@ -2050,8 +2050,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(
DruidExpression.fromExpression("substring(\"dim2\", 0, 1)"),
DruidExpression.fromColumn("j0.k")
makeExpression("substring(\"dim2\", 0, 1)"),
makeColumnExpression("j0.k")
),
JoinType.INNER
)
@ -2096,7 +2096,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.v0")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.v0")),
JoinType.INNER
)
)
@ -2130,12 +2130,12 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.k")),
JoinType.LEFT
),
new LookupDataSource("lookyloo"),
"_j0.",
equalsCondition(DruidExpression.fromColumn("j0.k"), DruidExpression.fromColumn("_j0.k")),
equalsCondition(makeColumnExpression("j0.k"), makeColumnExpression("_j0.k")),
JoinType.LEFT
)
)
@ -2175,17 +2175,17 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.LEFT
),
new LookupDataSource("lookyloo"),
"_j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("_j0.k")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("_j0.k")),
JoinType.LEFT
),
new LookupDataSource("lookyloo"),
"__j0.",
equalsCondition(DruidExpression.fromColumn("_j0.k"), DruidExpression.fromColumn("__j0.k")),
equalsCondition(makeColumnExpression("_j0.k"), makeColumnExpression("__j0.k")),
JoinType.LEFT
)
)
@ -2221,7 +2221,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.LEFT
)
)
@ -2258,7 +2258,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.RIGHT
)
)
@ -2293,7 +2293,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.FULL
)
)
@ -2346,8 +2346,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("__time"),
DruidExpression.fromColumn("j0.a0")
DruidExpression.ofColumn(ColumnType.LONG, "__time"),
DruidExpression.ofColumn(ColumnType.LONG, "j0.a0")
),
JoinType.INNER
)
@ -2486,8 +2486,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromExpression("substring(\"dim2\", 0, 1)"),
DruidExpression.fromColumn("j0.d0")
makeExpression("substring(\"dim2\", 0, 1)"),
makeColumnExpression("j0.d0")
),
JoinType.INNER
)
@ -2521,8 +2521,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(
DruidExpression.fromColumn("dim1"),
DruidExpression.fromColumn("j0.k")
makeColumnExpression("dim1"),
makeColumnExpression("j0.k")
),
JoinType.INNER
)
@ -2556,8 +2556,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(
DruidExpression.fromColumn("dim3"),
DruidExpression.fromColumn("j0.k")
makeColumnExpression("dim3"),
makeColumnExpression("j0.k")
),
JoinType.INNER
)
@ -2631,7 +2631,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("v0"), DruidExpression.fromColumn("j0.v0")),
equalsCondition(makeColumnExpression("v0"), makeColumnExpression("j0.v0")),
JoinType.LEFT
)
)
@ -2685,7 +2685,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromExpression("'10.1'"), DruidExpression.fromColumn("j0.v0")),
equalsCondition(makeExpression("'10.1'"), makeColumnExpression("j0.v0")),
JoinType.LEFT,
selector("dim1", "10.1", null)
)
@ -2744,7 +2744,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("v0"), DruidExpression.fromColumn("j0.dim1")),
equalsCondition(makeColumnExpression("v0"), makeColumnExpression("j0.dim1")),
JoinType.LEFT
)
)
@ -2791,8 +2791,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromExpression("'10.1'"),
DruidExpression.fromColumn("j0.dim1")
makeExpression("'10.1'"),
makeColumnExpression("j0.dim1")
),
JoinType.LEFT,
selector("dim1", "10.1", null)
@ -2847,7 +2847,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("v0"), DruidExpression.fromColumn("j0.dim1")),
equalsCondition(makeColumnExpression("v0"), makeColumnExpression("j0.dim1")),
JoinType.LEFT
)
)
@ -2892,8 +2892,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromExpression("'10.1'"),
DruidExpression.fromColumn("j0.dim1")
makeExpression("'10.1'"),
makeColumnExpression("j0.dim1")
),
JoinType.LEFT,
selector("dim1", "10.1", null)
@ -2940,7 +2940,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("v0"), DruidExpression.fromColumn("j0.dim1")),
equalsCondition(makeColumnExpression("v0"), makeColumnExpression("j0.dim1")),
JoinType.INNER
)
)
@ -2995,8 +2995,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromExpression("'10.1'"),
DruidExpression.fromColumn("j0.dim1")
makeExpression("'10.1'"),
makeColumnExpression("j0.dim1")
),
JoinType.INNER,
selector("dim1", "10.1", null)
@ -3051,7 +3051,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("v0"), DruidExpression.fromColumn("j0.dim1")),
equalsCondition(makeColumnExpression("v0"), makeColumnExpression("j0.dim1")),
JoinType.INNER
)
)
@ -3097,8 +3097,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromExpression("'10.1'"),
DruidExpression.fromColumn("j0.dim1")
makeExpression("'10.1'"),
makeColumnExpression("j0.dim1")
),
JoinType.INNER,
selector("dim1", "10.1", null)
@ -3238,7 +3238,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.d0")),
JoinType.INNER
)
)
@ -3264,7 +3264,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.d0")),
JoinType.LEFT
)
)
@ -3334,7 +3334,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.d0")),
JoinType.LEFT
)
)
@ -3374,7 +3374,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.context(QUERY_CONTEXT_DEFAULT)
.build()),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.dim1")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.dim1")),
JoinType.LEFT
)
)
@ -3428,10 +3428,13 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
"j0.",
StringUtils.format(
"(%s && %s)",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(
DruidExpression.fromExpression("'abc'"),
DruidExpression.fromColumn("j0.d0")
makeColumnExpression("dim1"),
makeColumnExpression("j0.d0")
),
equalsCondition(
makeExpression("'abc'"),
makeColumnExpression("j0.d0")
)
),
JoinType.INNER
@ -3474,7 +3477,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.d0")),
JoinType.INNER
)
)
@ -3757,7 +3760,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.d0")),
JoinType.INNER
)
)
@ -3838,7 +3841,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.INNER
))
.intervals(querySegmentSpec(Filtration.eternity()))
@ -3887,7 +3890,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.INNER
))
.intervals(querySegmentSpec(Filtration.eternity()))
@ -3919,7 +3922,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.LEFT
)
)
@ -3934,7 +3937,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.INNER
))
.intervals(querySegmentSpec(Filtration.eternity()))
@ -3991,8 +3994,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("j0.d0")
makeColumnExpression("dim2"),
makeColumnExpression("j0.d0")
),
JoinType.INNER
)
@ -4065,8 +4068,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("j0.d0")
makeColumnExpression("dim2"),
makeColumnExpression("j0.d0")
),
JoinType.INNER
)
@ -4167,7 +4170,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new TableDataSource(CalciteTests.DATASOURCE1),
new LookupDataSource("lookyloo"),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.k")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.k")),
JoinType.LEFT
)
)
@ -4223,8 +4226,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("j0.d0")
makeColumnExpression("dim2"),
makeColumnExpression("j0.d0")
),
JoinType.INNER
)
@ -4304,8 +4307,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"_j0.",
equalsCondition(
DruidExpression.fromColumn("dim2"),
DruidExpression.fromColumn("_j0.d0")
makeColumnExpression("dim2"),
makeColumnExpression("_j0.d0")
),
JoinType.LEFT
)
@ -4395,8 +4398,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("dim1"),
DruidExpression.fromColumn("j0.dim1")
makeColumnExpression("dim1"),
makeColumnExpression("j0.dim1")
),
JoinType.INNER
)
@ -4469,8 +4472,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromColumn("dim1"),
DruidExpression.fromColumn("j0.dim1")
makeColumnExpression("dim1"),
makeColumnExpression("j0.dim1")
),
JoinType.INNER
)
@ -4521,7 +4524,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.d0")),
JoinType.INNER
)
)
@ -4555,8 +4558,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new GlobalTableDataSource(CalciteTests.BROADCAST_DATASOURCE),
"j0.",
equalsCondition(
DruidExpression.fromColumn("dim4"),
DruidExpression.fromColumn("j0.dim4")
makeColumnExpression("dim4"),
makeColumnExpression("j0.dim4")
),
JoinType.INNER
@ -4596,8 +4599,8 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
new GlobalTableDataSource(CalciteTests.BROADCAST_DATASOURCE),
"j0.",
equalsCondition(
DruidExpression.fromColumn("dim4"),
DruidExpression.fromColumn("j0.dim4")
makeColumnExpression("dim4"),
makeColumnExpression("j0.dim4")
),
JoinType.INNER
)
@ -4647,7 +4650,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("v0"), DruidExpression.fromColumn("j0.v0")),
equalsCondition(makeColumnExpression("v0"), makeColumnExpression("j0.v0")),
JoinType.INNER
)
)
@ -4703,7 +4706,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("v0"), DruidExpression.fromColumn("j0.v0")),
equalsCondition(makeColumnExpression("v0"), makeColumnExpression("j0.v0")),
JoinType.INNER
)
)
@ -4719,7 +4722,7 @@ public class CalciteJoinQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("v0"), DruidExpression.fromColumn("j0.v0")),
equalsCondition(makeColumnExpression("v0"), makeColumnExpression("j0.v0")),
JoinType.INNER
)
)

View File

@ -263,10 +263,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
.context(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
new Object[]{"[\"a\",\"b\"]"},
new Object[]{useDefault ? "" : null}
)
ImmutableList.of(new Object[]{"[\"a\",\"b\"]"})
);
}
@ -403,10 +400,10 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
ImmutableList.of(
new Object[]{"", 2, 1L},
new Object[]{"10.1", 2, 1L},
new Object[]{"1", 1, 1L},
new Object[]{"2", 1, 1L},
new Object[]{"abc", 1, 1L},
new Object[]{"def", 1, 1L}
useDefault ? new Object[]{"2", 1, 1L} : new Object[]{"1", 1, 1L},
useDefault ? new Object[]{"1", 0, 1L} : new Object[]{"2", 1, 1L},
new Object[]{"abc", useDefault ? 0 : null, 1L},
new Object[]{"def", useDefault ? 0 : null, 1L}
)
);
}
@ -540,14 +537,14 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
ImmutableList<Object[]> results;
if (useDefault) {
results = ImmutableList.of(
new Object[]{"foo,null", "null,foo", 3L},
new Object[]{"", "", 3L},
new Object[]{"foo,a,b", "a,b,foo", 1L},
new Object[]{"foo,b,c", "b,c,foo", 1L},
new Object[]{"foo,d", "d,foo", 1L}
);
} else {
results = ImmutableList.of(
new Object[]{"foo,null", "null,foo", 2L},
new Object[]{null, null, 2L},
new Object[]{"foo,", ",foo", 1L},
new Object[]{"foo,a,b", "a,b,foo", 1L},
new Object[]{"foo,b,c", "b,c,foo", 1L},
@ -830,8 +827,14 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
.setContext(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
new Object[]{useDefault ? -1 : null, 4L},
useDefault
? ImmutableList.of(
new Object[]{0, 4L},
new Object[]{-1, 1L},
new Object[]{1, 1L}
)
: ImmutableList.of(
new Object[]{null, 4L},
new Object[]{0, 1L},
new Object[]{1, 1L}
)
@ -873,8 +876,15 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
.setContext(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
new Object[]{useDefault ? -1 : null, 4L},
useDefault
? ImmutableList.of(
new Object[]{0, 3L},
new Object[]{-1, 1L},
new Object[]{1, 1L},
new Object[]{2, 1L}
)
: ImmutableList.of(
new Object[]{null, 4L},
new Object[]{1, 1L},
new Object[]{2, 1L}
)
@ -946,8 +956,7 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
ImmutableList<Object[]> results;
if (useDefault) {
results = ImmutableList.of(
new Object[]{"d", 7L},
new Object[]{"", 3L},
new Object[]{"d", 4L},
new Object[]{"b", 2L},
new Object[]{"a", 1L},
new Object[]{"c", 1L}
@ -955,7 +964,6 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
} else {
results = ImmutableList.of(
new Object[]{"d", 5L},
new Object[]{null, 2L},
new Object[]{"b", 2L},
new Object[]{"", 1L},
new Object[]{"a", 1L},
@ -1116,8 +1124,14 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
.setVirtualColumns(
expressionVirtualColumn(
"v0",
"array_length(filter((x) -> array_contains(array('b'), x), \"dim3\"))",
"array_length(\"v1\")",
ColumnType.LONG
),
new ListFilteredVirtualColumn(
"v1",
DefaultDimensionSpec.of("dim3"),
ImmutableSet.of("b"),
true
)
)
.setDimensions(
@ -1137,9 +1151,16 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
.setContext(QUERY_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
useDefault ? ImmutableList.of(
new Object[]{0, 4L},
new Object[]{1, 2L}
) : ImmutableList.of(
// the fallback expression would actually produce 3 rows, 2 nulls, 2 0's, and 2 1s
// instead of 4 nulls and two 1's we get when using the 'native' list filtered virtual column
// this is because of slight differences between filter and the native
// selector, which treats a 0 length array as null instead of an empty array like is produced by filter
new Object[]{null, 4L},
new Object[]{1, 2L}
)
);
}
@ -1160,8 +1181,14 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
.setVirtualColumns(
expressionVirtualColumn(
"v0",
"array_length(filter((x) -> !array_contains(array('b'), x), \"dim3\"))",
"array_length(\"v1\")",
ColumnType.LONG
),
new ListFilteredVirtualColumn(
"v1",
DefaultDimensionSpec.of("dim3"),
ImmutableSet.of("b"),
false
)
)
.setDimensions(
@ -1181,7 +1208,9 @@ public class CalciteMultiValueStringQueryTest extends BaseCalciteQueryTest
.setContext(QUERY_CONTEXT_DEFAULT)
.build()
),
useDefault ? ImmutableList.of(new Object[]{1, 6L}) : ImmutableList.of(new Object[]{1, 4L}, new Object[]{0, 2L})
useDefault
? ImmutableList.of(new Object[]{0, 3L}, new Object[]{1, 3L})
: ImmutableList.of(new Object[]{1, 4L}, new Object[]{null, 2L})
);
}

View File

@ -6858,8 +6858,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromExpression("substring(\"dim2\", 0, 1)"),
DruidExpression.fromColumn("j0.d0")
makeExpression("substring(\"dim2\", 0, 1)"),
DruidExpression.ofColumn(ColumnType.STRING, "j0.d0")
),
JoinType.INNER
)
@ -10852,8 +10852,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
"j0.",
StringUtils.format(
"(%s && %s)",
equalsCondition(DruidExpression.fromColumn("dim1"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.p0"))
equalsCondition(makeColumnExpression("dim1"), makeColumnExpression("j0.d0")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.p0"))
),
JoinType.INNER
)
@ -10899,7 +10899,7 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
.build()
),
"j0.",
equalsCondition(DruidExpression.fromColumn("dim2"), DruidExpression.fromColumn("j0.d0")),
equalsCondition(makeColumnExpression("dim2"), makeColumnExpression("j0.d0")),
JoinType.INNER
)
)
@ -11629,8 +11629,8 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
),
"j0.",
equalsCondition(
DruidExpression.fromExpression("substring(\"dim2\", 0, 1)"),
DruidExpression.fromColumn("j0.d0")
makeExpression("substring(\"dim2\", 0, 1)"),
DruidExpression.ofColumn(ColumnType.STRING, "j0.d0")
),
JoinType.INNER
)

View File

@ -34,6 +34,7 @@ import org.apache.druid.data.input.MapBasedRow;
import org.apache.druid.math.expr.ExprEval;
import org.apache.druid.math.expr.InputBindings;
import org.apache.druid.math.expr.Parser;
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.query.filter.ValueMatcher;
import org.apache.druid.segment.RowAdapters;
@ -52,6 +53,7 @@ import org.apache.druid.sql.calcite.schema.NamedDruidSchema;
import org.apache.druid.sql.calcite.schema.NamedViewSchema;
import org.apache.druid.sql.calcite.schema.ViewSchema;
import org.apache.druid.sql.calcite.table.RowSignatures;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.easymock.EasyMock;
import org.joda.time.DateTime;
@ -205,7 +207,7 @@ class ExpressionTestHelper
.map(ExpressionTestHelper::quoteIfNeeded)
.collect(Collectors.joining(","));
List<String> elements = Arrays.asList(functionName, "(", argsString, ")");
return DruidExpression.fromExpression(String.join(noDelimiter, elements));
return CalciteTestBase.makeExpression(String.join(noDelimiter, elements));
}
private static String quoteIfNeeded(@Nullable Object arg)
@ -253,14 +255,45 @@ class ExpressionTestHelper
testExpression(rexBuilder.makeCall(op, exprs), expectedExpression, expectedResult);
}
/**
* @deprecated use {@link #testExpression(SqlOperator, RexNode, DruidExpression, Object)} instead which does a
* deep comparison of {@link DruidExpression} instead of just comparing the output of
* {@link DruidExpression#getExpression()}
*/
@Deprecated
void testExpressionString(
final SqlOperator op,
final List<? extends RexNode> exprs,
final DruidExpression expectedExpression,
final Object expectedResult
)
{
testExpression(rexBuilder.makeCall(op, exprs), expectedExpression, expectedResult, false);
}
void testExpression(
final RexNode rexNode,
final DruidExpression expectedExpression,
final Object expectedResult
)
{
testExpression(rexNode, expectedExpression, expectedResult, true);
}
void testExpression(
final RexNode rexNode,
final DruidExpression expectedExpression,
final Object expectedResult,
final boolean deepCompare
)
{
DruidExpression expression = Expressions.toDruidExpression(PLANNER_CONTEXT, rowSignature, rexNode);
Assert.assertEquals("Expression for: " + rexNode, expectedExpression, expression);
Assert.assertNotNull(expression);
if (deepCompare) {
Assert.assertEquals("Expression for: " + rexNode, expectedExpression, expression);
} else {
Assert.assertEquals("Expression for: " + rexNode, expectedExpression.getExpression(), expression.getExpression());
}
ExprEval<?> result = Parser.parse(expression.getExpression(), PLANNER_CONTEXT.getExprMacroTable())
.eval(InputBindings.withMap(bindings));
@ -277,7 +310,7 @@ class ExpressionTestHelper
)
{
final RexNode rexNode = rexBuilder.makeCall(op, exprs);
final VirtualColumnRegistry virtualColumnRegistry = VirtualColumnRegistry.create(rowSignature);
final VirtualColumnRegistry virtualColumnRegistry = VirtualColumnRegistry.create(rowSignature, TestExprMacroTable.INSTANCE);
final DimFilter filter = Expressions.toFilter(PLANNER_CONTEXT, rowSignature, virtualColumnRegistry, rexNode);
Assert.assertEquals("Filter for: " + rexNode, expectedFilter, filter);

Some files were not shown because too many files have changed in this diff Show More