diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java
index e05d1549a42..c9b46d678e5 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java
@@ -57,8 +57,7 @@ import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;
 import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.LinkedHashMap;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -113,7 +112,7 @@ public class DruidSchemaInternRowSignatureBenchmark
     protected Sequence runSegmentMetadataQuery(Iterable segments)
     {
       final int numColumns = 1000;
-      Map columnToAnalysisMap = new HashMap<>();
+      LinkedHashMap columnToAnalysisMap = new LinkedHashMap<>();
       for (int i = 0; i < numColumns; ++i) {
         columnToAnalysisMap.put(
             "col" + i,
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
index ca26ce69503..88cc5dcb8fe 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
@@ -58,8 +58,8 @@ import org.joda.time.Interval;
 import javax.annotation.Nullable;
 import java.io.IOException;
 import java.util.EnumSet;
+import java.util.LinkedHashMap;
 import java.util.Map;
-import java.util.TreeMap;
 public class SegmentAnalyzer
 {
@@ -98,7 +98,8 @@ public class SegmentAnalyzer
     // get length and column names from storageAdapter
     final int length = storageAdapter.getNumRows();
-    Map columns = new TreeMap<>();
+    // Use LinkedHashMap to preserve column order.
+    final Map columns = new LinkedHashMap<>();
     final RowSignature rowSignature = storageAdapter.getRowSignature();
     for (String columnName : rowSignature.getColumnNames()) {
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
index 1bb24ef2e71..45cc18ff5a3 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
@@ -54,17 +54,19 @@ import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
 import org.apache.druid.query.metadata.metadata.SegmentAnalysis;
 import org.apache.druid.query.metadata.metadata.SegmentMetadataQuery;
 import org.apache.druid.timeline.LogicalSegment;
+import org.apache.druid.timeline.SegmentId;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
+import javax.annotation.Nullable;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.TreeMap;
 import java.util.function.BinaryOperator;
 public class SegmentMetadataQueryQueryToolChest extends QueryToolChest
@@ -108,7 +110,8 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest
 updatedQueryPlus = queryPlus.withQuery(updatedQuery);
 return new MappedSequence<>(
     CombiningSequence.create(
@@ -135,7 +138,12 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest createMergeFn(Query query)
 {
-    return (arg1, arg2) -> mergeAnalyses(arg1, arg2, ((SegmentMetadataQuery) query).isLenientAggregatorMerge());
+    return (arg1, arg2) -> mergeAnalyses(
+        Iterables.getFirst(query.getDataSource().getTableNames(), null),
+        arg1,
+        arg2,
+        ((SegmentMetadataQuery) query).isLenientAggregatorMerge()
+    );
 }
 @Override
@@ -246,8 +254,9 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest
 newIntervals = null;
 if (arg1.getIntervals() != null) {
   newIntervals = new ArrayList<>(arg1.getIntervals());
@@ -272,7 +294,7 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest
 leftColumns = arg1.getColumns();
 final Map rightColumns = arg2.getColumns();
-    Map columns = new TreeMap<>();
+    final LinkedHashMap columns = new LinkedHashMap<>();
 Set rightColumnNames = Sets.newHashSet(rightColumns.keySet());
 for (Map.Entry entry : leftColumns.entrySet()) {
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java
index c07ab5d6b7a..bc7dc9339b9 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java
@@ -52,9 +52,9 @@ import org.joda.time.Interval;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.TreeMap;
 import java.util.concurrent.CancellationException;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
@@ -98,7 +98,7 @@ public class SegmentMetadataQueryRunnerFactory implements QueryRunnerFactory
 columns = new TreeMap<>();
+    LinkedHashMap columns = new LinkedHashMap<>();
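Aside on the TreeMap-to-LinkedHashMap swaps above: TreeMap iterates in sorted key order, so a segment whose columns were ingested as [__time, dim1, dim2, cnt, m1] would be reported alphabetically, with "cnt" pulled ahead of the dimensions. A minimal standalone sketch of the behavioral difference (column names are hypothetical, not taken from this patch):

```java
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class ColumnOrderDemo
{
  public static void main(String[] args)
  {
    // Hypothetical ingestion order: time column, then dimensions, then a metric.
    final String[] ingestionOrder = {"__time", "dim1", "dim2", "cnt", "m1"};

    final Map<String, String> sorted = new TreeMap<>();
    final Map<String, String> insertion = new LinkedHashMap<>();
    for (String column : ingestionOrder) {
      sorted.put(column, "analysis");
      insertion.put(column, "analysis");
    }

    // TreeMap re-sorts alphabetically: [__time, cnt, dim1, dim2, m1]
    System.out.println(sorted.keySet());
    // LinkedHashMap keeps insertion order: [__time, dim1, dim2, cnt, m1]
    System.out.println(insertion.keySet());
  }
}
```

LinkedHashMap keeps O(1) access and only adds a doubly linked list over the entries, so the swap preserves order at essentially no cost.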
 ColumnIncluderator includerator = updatedQuery.getToInclude();
 for (Map.Entry entry : analyzedColumns.entrySet()) {
   final String columnName = entry.getKey();
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java
index 71421366b4b..13576a6a11f 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java
@@ -26,6 +26,7 @@ import org.apache.druid.java.util.common.granularity.Granularity;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.joda.time.Interval;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -42,7 +43,12 @@ public class SegmentAnalysis implements Comparable
  */
 private final String id;
 private final List interval;
-private final Map columns;
+
+/**
+ * Require LinkedHashMap to emphasize how important column order is. It's used by DruidSchema to keep
+ * SQL column order in line with ingestion column order.
+ */
+private final LinkedHashMap columns;
 private final long size;
 private final long numRows;
 private final Map aggregators;
@@ -54,7 +60,7 @@ public class SegmentAnalysis implements Comparable
 public SegmentAnalysis(
     @JsonProperty("id") String id,
     @JsonProperty("intervals") List interval,
-    @JsonProperty("columns") Map columns,
+    @JsonProperty("columns") LinkedHashMap columns,
     @JsonProperty("size") long size,
     @JsonProperty("numRows") long numRows,
     @JsonProperty("aggregators") Map aggregators,
@@ -87,7 +93,7 @@ public class SegmentAnalysis implements Comparable
 }
 @JsonProperty
-public Map getColumns()
+public LinkedHashMap getColumns()
 {
   return columns;
 }
diff --git a/processing/src/main/java/org/apache/druid/segment/IndexIO.java b/processing/src/main/java/org/apache/druid/segment/IndexIO.java
index f593f104eb1..9698ebdc2be 100644
--- a/processing/src/main/java/org/apache/druid/segment/IndexIO.java
+++ b/processing/src/main/java/org/apache/druid/segment/IndexIO.java
@@ -79,6 +79,7 @@ import java.nio.ByteOrder;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -442,7 +443,7 @@ public class IndexIO
 {
   MMappedIndex index = legacyHandler.mapDir(inDir);
-  Map> columns = new HashMap<>();
+  Map> columns = new LinkedHashMap<>();
   for (String dimension : index.getAvailableDimensions()) {
     ColumnBuilder builder = new ColumnBuilder()
@@ -624,7 +625,7 @@ public class IndexIO
   }
 }
-  Map> columns = new HashMap<>();
+  Map> columns = new LinkedHashMap<>();
   // Register the time column
   ByteBuffer timeBuffer = smooshedFiles.mapFile("__time");
diff --git a/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java b/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java
index e773ff78dab..c0c117b8c8a 100644
--- a/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java
+++ b/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java
@@ -20,7 +20,6 @@
 package org.apache.druid.segment;
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Sets;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.ISE;
 import
org.apache.druid.java.util.common.granularity.Granularities; @@ -44,8 +43,9 @@ import org.joda.time.Interval; import javax.annotation.Nullable; import java.io.IOException; import java.io.UncheckedIOException; -import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.Objects; +import java.util.Set; /** * @@ -82,8 +82,14 @@ public class QueryableIndexStorageAdapter implements StorageAdapter @Override public Iterable getAvailableMetrics() { - HashSet columnNames = Sets.newHashSet(index.getColumnNames()); - return Sets.difference(columnNames, Sets.newHashSet(index.getAvailableDimensions())); + // Use LinkedHashSet to preserve the original order. + final Set columnNames = new LinkedHashSet<>(index.getColumnNames()); + + for (final String dimension : index.getAvailableDimensions()) { + columnNames.remove(dimension); + } + + return columnNames; } @Override diff --git a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java index d0719e75647..9dc8ee68475 100644 --- a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java +++ b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java @@ -73,6 +73,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -153,42 +154,44 @@ public class DoubleStorageTest SegmentAnalysis expectedSegmentAnalysisDouble = new SegmentAnalysis( SEGMENT_ID.toString(), ImmutableList.of(INTERVAL), - ImmutableMap.of( - TIME_COLUMN, - new ColumnAnalysis( - ColumnType.LONG, - ValueType.LONG.name(), - false, - false, - 100, - null, - null, - null, - null - ), - DIM_NAME, - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.name(), - false, - false, - 120, - 1, - DIM_VALUE, - DIM_VALUE, - null - ), - DIM_FLOAT_NAME, - new ColumnAnalysis( - ColumnType.DOUBLE, - ValueType.DOUBLE.name(), - false, - false, - 80, - null, - null, - null, - null + new LinkedHashMap<>( + ImmutableMap.of( + TIME_COLUMN, + new ColumnAnalysis( + ColumnType.LONG, + ValueType.LONG.name(), + false, + false, + 100, + null, + null, + null, + null + ), + DIM_NAME, + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.name(), + false, + false, + 120, + 1, + DIM_VALUE, + DIM_VALUE, + null + ), + DIM_FLOAT_NAME, + new ColumnAnalysis( + ColumnType.DOUBLE, + ValueType.DOUBLE.name(), + false, + false, + 80, + null, + null, + null, + null + ) ) ), 330, MAX_ROWS, @@ -201,44 +204,47 @@ public class DoubleStorageTest SegmentAnalysis expectedSegmentAnalysisFloat = new SegmentAnalysis( SEGMENT_ID.toString(), ImmutableList.of(INTERVAL), - ImmutableMap.of( - TIME_COLUMN, - new ColumnAnalysis( - ColumnType.LONG, - ValueType.LONG.name(), - false, - false, - 100, - null, - null, - null, - null - ), - DIM_NAME, - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.name(), - false, - false, - 120, - 1, - DIM_VALUE, - DIM_VALUE, - null - ), - DIM_FLOAT_NAME, - new ColumnAnalysis( - ColumnType.FLOAT, - ValueType.FLOAT.name(), - false, - false, - 80, - null, - null, - null, - null + new LinkedHashMap<>( + ImmutableMap.of( + TIME_COLUMN, + new ColumnAnalysis( + ColumnType.LONG, + ValueType.LONG.name(), + false, + false, + 100, + null, + null, + null, + null + ), + DIM_NAME, + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.name(), + false, + false, + 120, + 1, + DIM_VALUE, + DIM_VALUE, + null 
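Backing up to the getAvailableMetrics() hunk in QueryableIndexStorageAdapter above: Guava's Sets.difference() returns a view that iterates in the order of its first argument, so when that argument is a HashSet the metric names come out in hash order. A sketch of the contrast using only JDK collections, with hypothetical column names:

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class AvailableMetricsDemo
{
  public static void main(String[] args)
  {
    // Hypothetical segment layout: all columns in physical order, plus the dimension list.
    final List<String> allColumns = Arrays.asList("__time", "dim1", "dim2", "cnt", "m1");
    final List<String> dimensions = Arrays.asList("dim1", "dim2");

    // Old shape: a HashSet-backed difference iterates in hash order, not segment order.
    final Set<String> hashed = new HashSet<>(allColumns);
    hashed.removeAll(dimensions);
    System.out.println(hashed); // e.g. [cnt, m1, __time] -- order is unspecified

    // New shape: LinkedHashSet retains the segment's order for the surviving columns.
    final Set<String> ordered = new LinkedHashSet<>(allColumns);
    for (String dimension : dimensions) {
      ordered.remove(dimension);
    }
    System.out.println(ordered); // [__time, cnt, m1]
  }
}
```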
+ ), + DIM_FLOAT_NAME, + new ColumnAnalysis( + ColumnType.FLOAT, + ValueType.FLOAT.name(), + false, + false, + 80, + null, + null, + null, + null + ) ) - ), 330, + ), + 330, MAX_ROWS, null, null, diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalysisTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalysisTest.java new file mode 100644 index 00000000000..4f68c9e059d --- /dev/null +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalysisTest.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.query.metadata; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import org.apache.druid.data.input.impl.TimestampSpec; +import org.apache.druid.java.util.common.Intervals; +import org.apache.druid.java.util.common.granularity.Granularities; +import org.apache.druid.query.aggregation.CountAggregatorFactory; +import org.apache.druid.query.metadata.metadata.ColumnAnalysis; +import org.apache.druid.query.metadata.metadata.SegmentAnalysis; +import org.apache.druid.segment.TestHelper; +import org.apache.druid.segment.column.ColumnType; +import org.junit.Assert; +import org.junit.Test; + +import java.util.LinkedHashMap; + +public class SegmentAnalysisTest +{ + @Test + public void testSerde() throws Exception + { + // Use LinkedHashMap to preserve order. + // We'll verify that the order is actually preserved on serde. + final LinkedHashMap columns = new LinkedHashMap<>(); + columns.put( + "b", + new ColumnAnalysis(ColumnType.LONG, ColumnType.LONG.asTypeString(), true, true, 0, null, null, null, null) + ); + columns.put( + "a", + new ColumnAnalysis(ColumnType.FLOAT, ColumnType.FLOAT.asTypeString(), true, true, 0, null, null, null, null) + ); + columns.put( + "f", + new ColumnAnalysis(ColumnType.STRING, ColumnType.STRING.asTypeString(), true, true, 0, null, null, null, null) + ); + columns.put( + "c", + new ColumnAnalysis(ColumnType.DOUBLE, ColumnType.DOUBLE.asTypeString(), true, true, 0, null, null, null, null) + ); + + final SegmentAnalysis analysis = new SegmentAnalysis( + "id", + Intervals.ONLY_ETERNITY, + columns, + 1, + 2, + ImmutableMap.of("cnt", new CountAggregatorFactory("cnt")), + new TimestampSpec(null, null, null), + Granularities.SECOND, + true + ); + + final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); + final SegmentAnalysis analysis2 = jsonMapper.readValue( + jsonMapper.writeValueAsBytes(analysis), + SegmentAnalysis.class + ); + + Assert.assertEquals(analysis, analysis2); + + // Verify column order is preserved. 
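The assertion that follows the comment above leans on Jackson behavior worth spelling out: Jackson serializes a Map in its iteration order and, when the declared target type is LinkedHashMap, reads keys back in document order. A minimal sketch of just that round trip (the toy keys mirror the test's deliberately unsorted b/a/f/c):

```java
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.LinkedHashMap;

public class OrderedSerdeDemo
{
  public static void main(String[] args) throws Exception
  {
    final ObjectMapper mapper = new ObjectMapper();
    final String json = "{\"b\":1,\"a\":2,\"f\":3,\"c\":4}";

    // Because the declared target is LinkedHashMap, Jackson inserts keys in
    // document order; a TreeMap target would silently re-sort them.
    final LinkedHashMap<String, Integer> columns = mapper.readValue(
        json,
        new TypeReference<LinkedHashMap<String, Integer>>() {}
    );

    System.out.println(columns.keySet()); // [b, a, f, c]
  }
}
```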
+ Assert.assertEquals( + ImmutableList.copyOf(columns.entrySet()), + ImmutableList.copyOf(analysis2.getColumns().entrySet()) + ); + } +} diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java index 82482541fbe..b8c35917c3f 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java @@ -46,6 +46,7 @@ import org.apache.druid.segment.QueryableIndexSegment; import org.apache.druid.segment.Segment; import org.apache.druid.segment.TestIndex; import org.apache.druid.segment.column.ColumnBuilder; +import org.apache.druid.segment.column.ColumnHolder; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.ValueType; import org.apache.druid.segment.data.ObjectStrategy; @@ -68,6 +69,7 @@ import java.io.IOException; import java.net.URL; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; @@ -108,9 +110,20 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest columns.size() ); // All columns including time and empty/null column - for (DimensionSchema schema : TestIndex.DIMENSION_SCHEMAS) { + // Verify key order is the same as the underlying segment. + // This helps DruidSchema keep things in the proper order when it does SegmentMetadata queries. + final List> entriesInOrder = new ArrayList<>(columns.entrySet()); + + Assert.assertEquals(ColumnHolder.TIME_COLUMN_NAME, entriesInOrder.get(0).getKey()); + Assert.assertEquals(ColumnType.LONG, entriesInOrder.get(0).getValue().getTypeSignature()); + + // Start from 1: skipping __time + for (int i = 0; i < TestIndex.DIMENSION_SCHEMAS.size(); i++) { + final DimensionSchema schema = TestIndex.DIMENSION_SCHEMAS.get(i); + final Map.Entry analysisEntry = entriesInOrder.get(i + 1 /* skip __time */); final String dimension = schema.getName(); - final ColumnAnalysis columnAnalysis = columns.get(dimension); + Assert.assertEquals(dimension, analysisEntry.getKey()); + final ColumnAnalysis columnAnalysis = analysisEntry.getValue(); final boolean isString = schema.getColumnType().is(ValueType.STRING); Assert.assertEquals(dimension, schema.getColumnType().toString(), columnAnalysis.getType()); @@ -161,14 +174,20 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest Assert.assertEquals(SegmentId.dummy("test_1").toString(), analysis.getId()); final Map columns = analysis.getColumns(); - Assert.assertEquals( - TestIndex.COLUMNS.length + 3, - columns.size() - ); // All columns including time + // Verify key order is the same as the underlying segment. + // This helps DruidSchema keep things in the proper order when it does SegmentMetadata queries. 
+ final List> entriesInOrder = new ArrayList<>(columns.entrySet()); - for (DimensionSchema schema : TestIndex.DIMENSION_SCHEMAS) { + Assert.assertEquals(ColumnHolder.TIME_COLUMN_NAME, entriesInOrder.get(0).getKey()); + Assert.assertEquals(ColumnType.LONG, entriesInOrder.get(0).getValue().getTypeSignature()); + + // Start from 1: skipping __time + for (int i = 0; i < TestIndex.DIMENSION_SCHEMAS.size(); i++) { + final DimensionSchema schema = TestIndex.DIMENSION_SCHEMAS.get(i); + final Map.Entry analysisEntry = entriesInOrder.get(i + 1 /* skip __time */); final String dimension = schema.getName(); - final ColumnAnalysis columnAnalysis = columns.get(dimension); + Assert.assertEquals(dimension, analysisEntry.getKey()); + final ColumnAnalysis columnAnalysis = analysisEntry.getValue(); final boolean isString = schema.getColumnType().is(ValueType.STRING); Assert.assertEquals(dimension, schema.getColumnType().toString(), columnAnalysis.getType()); Assert.assertEquals(dimension, 0, columnAnalysis.getSize()); @@ -204,6 +223,7 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest * *Awesome* method name auto-generated by IntelliJ! I love IntelliJ! * * @param index + * * @return */ private List getSegmentAnalysises(Segment index, EnumSet analyses) @@ -257,6 +277,7 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest * (which can happen if an aggregator was removed for a later version), then, * analyzing the segment doesn't fail and the result of analysis of the complex column * is reported as an error. + * * @throws IOException */ @Test @@ -317,7 +338,10 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest Assert.assertEquals("error:unknown_complex_invalid_complex_column_type", invalidColumnAnalysis.getErrorMessage()); // Run a segment metadata query also to verify it doesn't break - final List results = getSegmentAnalysises(segment, EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE)); + final List results = getSegmentAnalysises( + segment, + EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE) + ); for (SegmentAnalysis result : results) { Assert.assertTrue(result.getColumns().get(invalid_aggregator).isError()); } diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java index b93c160b290..f6a8e8b4979 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java @@ -46,6 +46,7 @@ import org.junit.Assert; import org.junit.Test; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -77,20 +78,23 @@ public class SegmentMetadataQueryQueryToolChestTest SegmentAnalysis result = new SegmentAnalysis( "testSegment", ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.name(), - true, - false, - 10881, - 1, - "preferred", - "preferred", - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.name(), + true, + false, + 10881, + 1, + "preferred", + "preferred", + null + ) ) - ), 71982, + ), + 71982, 100, null, null, @@ -117,7 +121,7 @@ public class 
SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, ImmutableMap.of( @@ -131,7 +135,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, ImmutableMap.of( @@ -167,7 +171,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -178,7 +182,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, ImmutableMap.of( @@ -206,7 +210,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -217,7 +221,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -236,7 +240,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, ImmutableMap.of( @@ -250,7 +254,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, ImmutableMap.of( @@ -331,7 +335,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -342,7 +346,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -353,7 +357,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis3 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -364,7 +368,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis4 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -375,7 +379,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis5 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -395,6 +399,7 @@ public class SegmentMetadataQueryQueryToolChestTest { return SegmentMetadataQueryQueryToolChest.finalizeAnalysis( SegmentMetadataQueryQueryToolChest.mergeAnalyses( + null, analysis1, analysis2, false @@ -406,6 +411,7 @@ public class SegmentMetadataQueryQueryToolChestTest { return SegmentMetadataQueryQueryToolChest.finalizeAnalysis( SegmentMetadataQueryQueryToolChest.mergeAnalyses( + null, analysis1, analysis2, true diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java index ded726d5838..0a93dd373ac 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java @@ -69,6 +69,7 @@ import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; +import 
java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; @@ -202,42 +203,44 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest expectedSegmentAnalysis1 = new SegmentAnalysis( id1.toString(), ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), - ImmutableMap.of( - "__time", - new ColumnAnalysis( - ColumnType.LONG, - ValueType.LONG.toString(), - false, - false, - 12090, - null, - null, - null, - null - ), - "index", - new ColumnAnalysis( - ColumnType.DOUBLE, - ValueType.DOUBLE.toString(), - false, - false, - 9672, - null, - null, - null, - null - ), - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - preferedSize1, - 1, - "preferred", - "preferred", - null + new LinkedHashMap<>( + ImmutableMap.of( + "__time", + new ColumnAnalysis( + ColumnType.LONG, + ValueType.LONG.toString(), + false, + false, + 12090, + null, + null, + null, + null + ), + "index", + new ColumnAnalysis( + ColumnType.DOUBLE, + ValueType.DOUBLE.toString(), + false, + false, + 9672, + null, + null, + null, + null + ), + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + preferedSize1, + 1, + "preferred", + "preferred", + null + ) ) ), overallSize1, @@ -250,42 +253,44 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest expectedSegmentAnalysis2 = new SegmentAnalysis( id2.toString(), ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), - ImmutableMap.of( - "__time", - new ColumnAnalysis( - ColumnType.LONG, - ValueType.LONG.toString(), - false, - false, - 12090, - null, - null, - null, - null - ), - "index", - new ColumnAnalysis( - ColumnType.DOUBLE, - ValueType.DOUBLE.toString(), - false, - false, - 9672, - null, - null, - null, - null - ), - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - placementSize2, - 1, - null, - null, - null + new LinkedHashMap<>( + ImmutableMap.of( + "__time", + new ColumnAnalysis( + ColumnType.LONG, + ValueType.LONG.toString(), + false, + false, + 12090, + null, + null, + null, + null + ), + "index", + new ColumnAnalysis( + ColumnType.DOUBLE, + ValueType.DOUBLE.toString(), + false, + false, + 9672, + null, + null, + null, + null + ), + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + placementSize2, + 1, + null, + null, + null + ) ) ), // null_column will be included only for incremental index, which makes a little bigger result than expected @@ -313,30 +318,32 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? 
"merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null - ), - "placementish", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - true, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ), + "placementish", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + true, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -385,30 +392,32 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 1, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null - ), - "placementish", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - true, - false, - 0, - 9, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 1, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ), + "placementish", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + true, + false, + 0, + 9, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -457,30 +466,32 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 1, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null - ), - "quality_uniques", - new ColumnAnalysis( - ColumnType.ofComplex("hyperUnique"), - "hyperUnique", - false, - true, - 0, - null, - null, - null, - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 1, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ), + "quality_uniques", + new ColumnAnalysis( + ColumnType.ofComplex("hyperUnique"), + "hyperUnique", + false, + true, + 0, + null, + null, + null, + null + ) ) ), 0, @@ -600,33 +611,35 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? 
"merged" : SegmentId.dummy("testSegment").toString(), ImmutableList.of(expectedSegmentAnalysis1.getIntervals().get(0)), - ImmutableMap.of( - "__time", - new ColumnAnalysis( - ColumnType.LONG, - ValueType.LONG.toString(), - false, - false, - 12090 * 2, - null, - null, - null, - null - ), - "index", - new ColumnAnalysis( - ColumnType.DOUBLE, - ValueType.DOUBLE.toString(), - false, - false, - 9672 * 2, - null, - null, - null, - null - ), - column, - analysis + new LinkedHashMap<>( + ImmutableMap.of( + "__time", + new ColumnAnalysis( + ColumnType.LONG, + ValueType.LONG.toString(), + false, + false, + 12090 * 2, + null, + null, + null, + null + ), + "index", + new ColumnAnalysis( + ColumnType.DOUBLE, + ValueType.DOUBLE.toString(), + false, + false, + 9672 * 2, + null, + null, + null, + null + ), + column, + analysis + ) ), expectedSegmentAnalysis1.getSize() + expectedSegmentAnalysis2.getSize(), expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(), @@ -668,18 +681,20 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -732,18 +747,20 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -792,18 +809,20 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -852,18 +871,20 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? 
"merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -937,7 +958,10 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest TestHelper.assertExpectedObjects( ImmutableList.of(bySegmentResult, bySegmentResult), - myRunner.run(QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true)))), + myRunner.run(QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of( + QueryContexts.BY_SEGMENT_KEY, + true + )))), "failed SegmentMetadata bySegment query" ); exec.shutdownNow(); @@ -1265,12 +1289,12 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest .build(); final byte[] oneColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy( - oneColumnQuery) + oneColumnQuery) .computeCacheKey( oneColumnQuery); final byte[] twoColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy( - twoColumnQuery) + twoColumnQuery) .computeCacheKey( twoColumnQuery); diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java index ead3c9ff62c..b29bfeb4950 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java @@ -44,6 +44,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; @RunWith(Parameterized.class) @@ -80,7 +81,7 @@ public class SegmentMetadataUnionQueryTest extends InitializedNullHandlingTest null ), true, - }, + }, new Object[]{ QueryRunnerTestHelper.makeUnionQueryRunner( FACTORY, @@ -99,18 +100,20 @@ public class SegmentMetadataUnionQueryTest extends InitializedNullHandlingTest SegmentAnalysis expected = new SegmentAnalysis( QueryRunnerTestHelper.SEGMENT_ID.toString(), Collections.singletonList(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - mmap ? 43524 : 43056, - 1, - "preferred", - "preferred", - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + mmap ? 43524 : 43056, + 1, + "preferred", + "preferred", + null + ) ) ), mmap ? 
805380 : 803324, diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java index 785913b0a6a..898489cce39 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java @@ -73,6 +73,7 @@ import java.io.IOException; import java.util.Comparator; import java.util.EnumSet; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.Map; import java.util.Optional; import java.util.Set; @@ -783,7 +784,9 @@ public class DruidSchema extends AbstractSchema DruidTable buildDruidTable(final String dataSource) { ConcurrentSkipListMap segmentsMap = segmentMetadataInfo.get(dataSource); - final Map columnTypes = new TreeMap<>(); + + // Preserve order. + final Map columnTypes = new LinkedHashMap<>(); if (segmentsMap != null && !segmentsMap.isEmpty()) { for (AvailableSegmentMetadata availableSegmentMetadata : segmentsMap.values()) { diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index 5baa3de83a2..0e7128b0690 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -572,14 +572,6 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "TIMESTAMP"), Pair.of("IS_NULLABLE", "NO") ), - row( - Pair.of("TABLE_SCHEM", "druid"), - Pair.of("TABLE_NAME", "foo"), - Pair.of("COLUMN_NAME", "cnt"), - Pair.of("DATA_TYPE", Types.BIGINT), - Pair.of("TYPE_NAME", "BIGINT"), - Pair.of("IS_NULLABLE", nullNumeric ? "YES" : "NO") - ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), @@ -604,6 +596,14 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "VARCHAR"), Pair.of("IS_NULLABLE", "YES") ), + row( + Pair.of("TABLE_SCHEM", "druid"), + Pair.of("TABLE_NAME", "foo"), + Pair.of("COLUMN_NAME", "cnt"), + Pair.of("DATA_TYPE", Types.BIGINT), + Pair.of("TYPE_NAME", "BIGINT"), + Pair.of("IS_NULLABLE", nullNumeric ? "YES" : "NO") + ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), @@ -663,14 +663,6 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "TIMESTAMP"), Pair.of("IS_NULLABLE", "NO") ), - row( - Pair.of("TABLE_SCHEM", "druid"), - Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), - Pair.of("COLUMN_NAME", "cnt"), - Pair.of("DATA_TYPE", Types.BIGINT), - Pair.of("TYPE_NAME", "BIGINT"), - Pair.of("IS_NULLABLE", nullNumeric ? "YES" : "NO") - ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), @@ -687,6 +679,14 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TYPE_NAME", "VARCHAR"), Pair.of("IS_NULLABLE", "YES") ), + row( + Pair.of("TABLE_SCHEM", "druid"), + Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), + Pair.of("COLUMN_NAME", "cnt"), + Pair.of("DATA_TYPE", Types.BIGINT), + Pair.of("TYPE_NAME", "BIGINT"), + Pair.of("IS_NULLABLE", nullNumeric ? 
"YES" : "NO") + ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), @@ -1181,11 +1181,6 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE), Pair.of("COLUMN_NAME", "__time") ), - row( - Pair.of("TABLE_SCHEM", "druid"), - Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE), - Pair.of("COLUMN_NAME", "cnt") - ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE), @@ -1201,6 +1196,11 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE), Pair.of("COLUMN_NAME", "dim3") ), + row( + Pair.of("TABLE_SCHEM", "druid"), + Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE), + Pair.of("COLUMN_NAME", "cnt") + ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE), diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java index c04f429ff1a..c6eeb833913 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java @@ -126,10 +126,10 @@ public class DruidStatementTest extends CalciteTestBase Assert.assertEquals( Lists.newArrayList( Lists.newArrayList("__time", "TIMESTAMP", "java.lang.Long"), - Lists.newArrayList("cnt", "BIGINT", "java.lang.Number"), Lists.newArrayList("dim1", "VARCHAR", "java.lang.String"), Lists.newArrayList("dim2", "VARCHAR", "java.lang.String"), Lists.newArrayList("dim3", "VARCHAR", "java.lang.String"), + Lists.newArrayList("cnt", "BIGINT", "java.lang.Number"), Lists.newArrayList("m1", "FLOAT", "java.lang.Float"), Lists.newArrayList("m2", "DOUBLE", "java.lang.Double"), Lists.newArrayList("unique_dim1", "OTHER", "java.lang.Object") @@ -166,9 +166,7 @@ public class DruidStatementTest extends CalciteTestBase true, Lists.newArrayList( new Object[]{""}, - new Object[]{ - "1" - }, + new Object[]{"1"}, new Object[]{"10.1"}, new Object[]{"2"}, new Object[]{"abc"}, diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java index f07f60980d5..31246b4bca1 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java @@ -108,8 +108,8 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest + ")"; final String legacyExplanation = "DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"query\",\"query\":{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"list\",\"granularity\":{\"type\":\"all\"}}},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}}], signature=[{a0:LONG}])\n" - + " 
DruidJoinQueryRel(condition=[=(SUBSTRING($3, 1, 1), $8)], joinType=[inner], query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__join__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}}], signature=[{d0:STRING}])\n" - + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, cnt:LONG, dim1:STRING, dim2:STRING, dim3:STRING, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n" + + " DruidJoinQueryRel(condition=[=(SUBSTRING($2, 1, 1), $8)], joinType=[inner], query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__join__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}}], signature=[{d0:STRING}])\n" + + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, dim1:STRING, dim2:STRING, dim3:STRING, cnt:LONG, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n" + " 
DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}}], signature=[{d0:STRING}])\n"; final String explanation = "[" + "{\"query\":{\"queryType\":\"groupBy\"," @@ -153,8 +153,8 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest // Skip vectorization since otherwise the "context" will change for each subtest. skipVectorize(); - String legacyExplanation = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, cnt:LONG, dim1:STRING, dim2:STRING, dim3:STRING, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; - String legacyExplanationWithContext = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"useNativeQueryExplain\":false},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, cnt:LONG, dim1:STRING, dim2:STRING, dim3:STRING, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; + String legacyExplanation = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, dim1:STRING, dim2:STRING, dim3:STRING, cnt:LONG, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; + String legacyExplanationWithContext = 
"DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"useNativeQueryExplain\":false},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, dim1:STRING, dim2:STRING, dim3:STRING, cnt:LONG, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; String explanation = "[{" + "\"query\":{\"queryType\":\"scan\"," + "\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"}," @@ -164,7 +164,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest + "\"legacy\":false," + "\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}," + "\"granularity\":{\"type\":\"all\"}}," - + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + "}]"; String explanationWithContext = "[{" @@ -176,7 +176,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest + "\"legacy\":false," + "\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"useNativeQueryExplain\":true,\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}," + "\"granularity\":{\"type\":\"all\"}}," - + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + "}]"; String sql = "EXPLAIN PLAN FOR SELECT * FROM druid.foo"; String resources = "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]"; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java index 7040e7fc539..8200fa2dc1f 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java @@ -70,10 +70,10 @@ 
public class CalciteIngestionDmlTest extends BaseCalciteQueryTest protected static final RowSignature FOO_TABLE_SIGNATURE = RowSignature.builder() .addTimeColumn() - .add("cnt", ColumnType.LONG) .add("dim1", ColumnType.STRING) .add("dim2", ColumnType.STRING) .add("dim3", ColumnType.STRING) + .add("cnt", ColumnType.LONG) .add("m1", ColumnType.FLOAT) .add("m2", ColumnType.DOUBLE) .add("unique_dim1", HyperUniquesAggregatorFactory.TYPE) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index 15680504c6d..77e6ca22d23 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -282,10 +282,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of(), ImmutableList.of( new Object[]{"__time", "TIMESTAMP", "NO"}, - new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"}, new Object[]{"dim1", "VARCHAR", "YES"}, new Object[]{"dim2", "VARCHAR", "YES"}, new Object[]{"dim3", "VARCHAR", "YES"}, + new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"}, new Object[]{"m1", "FLOAT", useDefault ? "NO" : "YES"}, new Object[]{"m2", "DOUBLE", useDefault ? "NO" : "YES"}, new Object[]{"unique_dim1", "COMPLEX", "YES"} @@ -313,9 +313,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest ImmutableList.of(), ImmutableList.of( new Object[]{"__time", "TIMESTAMP", "NO"}, - new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"}, new Object[]{"dim1", "VARCHAR", "YES"}, new Object[]{"dim2", "VARCHAR", "YES"}, + new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"}, new Object[]{"m1", "FLOAT", useDefault ? "NO" : "YES"}, new Object[]{"m2", "DOUBLE", useDefault ? 
"NO" : "YES"}, new Object[]{"unique_dim1", "COMPLEX", "YES"} diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java index 310c2eb8e23..4f39dff99f0 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java @@ -592,9 +592,9 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{timestamp("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1.0f, 1.0d, HLLC_STRING}, - new Object[]{timestamp("2001-01-01"), 1L, "1", "a", "", 4.0f, 4.0d, HLLC_STRING}, - new Object[]{timestamp("2001-01-02"), 1L, "def", "abc", NULL_STRING, 5.0f, 5.0d, HLLC_STRING} + new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1.0f, 1.0d, HLLC_STRING}, + new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4.0f, 4.0d, HLLC_STRING}, + new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5.0f, 5.0d, HLLC_STRING} ) ); } @@ -1097,12 +1097,12 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{timestamp("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1f, 1.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING, "[\"b\",\"c\"]", 2f, 2.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-03"), 1L, "2", "", "d", 3f, 3.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-01"), 1L, "1", "a", "", 4f, 4.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-02"), 1L, "def", "abc", NULL_STRING, 5f, 5.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-03"), 1L, "abc", NULL_STRING, NULL_STRING, 6f, 6.0, HLLC_STRING} + new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1f, 1.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2f, 2.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4f, 4.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6.0, HLLC_STRING} ) ); } @@ -1131,18 +1131,18 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest ImmutableList.of( new Object[]{ timestamp("2000-01-01"), - 1L, "forbidden", "abcd", + 1L, 9999.0f, NullHandling.defaultDoubleValue(), "\"AQAAAQAAAALFBA==\"" }, new Object[]{ timestamp("2000-01-02"), - 1L, "forbidden", "a", + 1L, 1234.0f, NullHandling.defaultDoubleValue(), "\"AQAAAQAAAALFBA==\"" @@ -1271,7 +1271,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest skipVectorize(); final String query = "EXPLAIN PLAN FOR SELECT * FROM druid.foo"; - final String legacyExplanation = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], 
signature=[{__time:LONG, cnt:LONG, dim1:STRING, dim2:STRING, dim3:STRING, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; + final String legacyExplanation = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, dim1:STRING, dim2:STRING, dim3:STRING, cnt:LONG, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; final String explanation = "[{" + "\"query\":{\"queryType\":\"scan\"," + "\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"}," @@ -1281,7 +1281,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest + "\"legacy\":false," + "\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}," + "\"granularity\":{\"type\":\"all\"}}," - + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + "}]"; final String resources = "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]"; @@ -1328,8 +1328,8 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{timestamp("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1.0f, 1.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING, "[\"b\",\"c\"]", 2.0f, 2.0, HLLC_STRING} + new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1.0f, 1.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2.0f, 2.0, HLLC_STRING} ) ); } @@ -1354,8 +1354,8 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING, "[\"b\",\"c\"]", 2.0f, 2.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-03"), 1L, "2", "", "d", 3f, 3.0, HLLC_STRING} + new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2.0f, 2.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, HLLC_STRING} ) ); } @@ -1429,8 +1429,8 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{timestamp("2001-01-03"), 1L, "abc", NULL_STRING, NULL_STRING, 6f, 6d, HLLC_STRING}, - new Object[]{timestamp("2001-01-02"), 1L, "def", "abc", NULL_STRING, 5f, 5d, HLLC_STRING} + new Object[]{timestamp("2001-01-03"), "abc", 
NULL_STRING, NULL_STRING, 1L, 6f, 6d, HLLC_STRING}, + new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5d, HLLC_STRING} ) ); } @@ -1455,12 +1455,12 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest .build() ), ImmutableList.of( - new Object[]{timestamp("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1f, 1.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING, "[\"b\",\"c\"]", 2f, 2.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-03"), 1L, "2", "", "d", 3f, 3.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-01"), 1L, "1", "a", "", 4f, 4.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-02"), 1L, "def", "abc", NULL_STRING, 5f, 5.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-03"), 1L, "abc", NULL_STRING, NULL_STRING, 6f, 6.0, HLLC_STRING} + new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1f, 1.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2f, 2.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4f, 4.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6.0, HLLC_STRING} ) ); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java index 708ba50c4d1..36e575e2bdb 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java @@ -78,6 +78,7 @@ import java.io.File; import java.io.IOException; import java.util.EnumSet; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -206,7 +207,10 @@ public class DruidSchemaTest extends DruidSchemaTestCommon CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate), serverView, segmentManager, - new MapJoinableFactory(ImmutableSet.of(globalTableJoinable), ImmutableMap.of(globalTableJoinable.getClass(), GlobalTableDataSource.class)), + new MapJoinableFactory( + ImmutableSet.of(globalTableJoinable), + ImmutableMap.of(globalTableJoinable.getClass(), GlobalTableDataSource.class) + ), PLANNER_CONFIG_DEFAULT, new NoopEscalator(), new BrokerInternalQueryConfig(), @@ -288,16 +292,16 @@ public class DruidSchemaTest extends DruidSchemaTestCommon Assert.assertEquals("__time", fields.get(0).getName()); Assert.assertEquals(SqlTypeName.TIMESTAMP, fields.get(0).getType().getSqlTypeName()); - Assert.assertEquals("cnt", fields.get(1).getName()); - Assert.assertEquals(SqlTypeName.BIGINT, fields.get(1).getType().getSqlTypeName()); + Assert.assertEquals("dim2", fields.get(1).getName()); + Assert.assertEquals(SqlTypeName.VARCHAR, fields.get(1).getType().getSqlTypeName()); - Assert.assertEquals("dim1", fields.get(2).getName()); - Assert.assertEquals(SqlTypeName.VARCHAR, fields.get(2).getType().getSqlTypeName()); + Assert.assertEquals("m1", fields.get(2).getName()); + Assert.assertEquals(SqlTypeName.BIGINT, fields.get(2).getType().getSqlTypeName()); - Assert.assertEquals("dim2", fields.get(3).getName()); + Assert.assertEquals("dim1", fields.get(3).getName()); Assert.assertEquals(SqlTypeName.VARCHAR, fields.get(3).getType().getSqlTypeName()); - Assert.assertEquals("m1", fields.get(4).getName()); + 
Assert.assertEquals("cnt", fields.get(4).getName()); Assert.assertEquals(SqlTypeName.BIGINT, fields.get(4).getType().getSqlTypeName()); Assert.assertEquals("unique_dim1", fields.get(5).getName()); @@ -1065,7 +1069,7 @@ public class DruidSchemaTest extends DruidSchemaTestCommon new TableDataSource(segment.getDataSource()), new MultipleSpecificSegmentSpec( segmentIterable.stream() - .map(SegmentId::toDescriptor).collect(Collectors.toList())), + .map(SegmentId::toDescriptor).collect(Collectors.toList())), new AllColumnIncluderator(), false, queryContext, @@ -1094,7 +1098,8 @@ public class DruidSchemaTest extends DruidSchemaTestCommon EasyMock.expect(factoryMock.factorize()).andReturn(lifecycleMock).once(); // This is the mat of the test, making sure that the query created by the method under test matches the expected query, specifically the operator configured context - EasyMock.expect(lifecycleMock.runSimple(expectedMetadataQuery, AllowAllAuthenticator.ALLOW_ALL_RESULT, Access.OK)).andReturn(null); + EasyMock.expect(lifecycleMock.runSimple(expectedMetadataQuery, AllowAllAuthenticator.ALLOW_ALL_RESULT, Access.OK)) + .andReturn(null); EasyMock.replay(factoryMock, lifecycleMock); @@ -1107,36 +1112,28 @@ public class DruidSchemaTest extends DruidSchemaTestCommon @Test public void testSegmentMetadataColumnType() { + // Verify order is preserved. + final LinkedHashMap columns = new LinkedHashMap<>(); + columns.put( + "a", + new ColumnAnalysis(ColumnType.STRING, ColumnType.STRING.asTypeString(), false, true, 1234, 26, "a", "z", null) + ); + + columns.put( + "count", + new ColumnAnalysis(ColumnType.LONG, ColumnType.LONG.asTypeString(), false, true, 1234, 26, "a", "z", null) + ); + + columns.put( + "b", + new ColumnAnalysis(ColumnType.DOUBLE, ColumnType.DOUBLE.asTypeString(), false, true, 1234, 26, null, null, null) + ); + RowSignature signature = DruidSchema.analysisToRowSignature( new SegmentAnalysis( "id", ImmutableList.of(Intervals.utc(1L, 2L)), - ImmutableMap.of( - "a", - new ColumnAnalysis( - ColumnType.STRING, - ColumnType.STRING.asTypeString(), - false, - true, - 1234, - 26, - "a", - "z", - null - ), - "count", - new ColumnAnalysis( - ColumnType.LONG, - ColumnType.LONG.asTypeString(), - false, - true, - 1234, - 26, - "a", - "z", - null - ) - ), + columns, 1234, 100, null, @@ -1147,7 +1144,11 @@ public class DruidSchemaTest extends DruidSchemaTestCommon ); Assert.assertEquals( - RowSignature.builder().add("a", ColumnType.STRING).add("count", ColumnType.LONG).build(), + RowSignature.builder() + .add("a", ColumnType.STRING) + .add("count", ColumnType.LONG) + .add("b", ColumnType.DOUBLE) + .build(), signature ); } @@ -1160,30 +1161,32 @@ public class DruidSchemaTest extends DruidSchemaTestCommon new SegmentAnalysis( "id", ImmutableList.of(Intervals.utc(1L, 2L)), - ImmutableMap.of( - "a", - new ColumnAnalysis( - null, - ColumnType.STRING.asTypeString(), - false, - true, - 1234, - 26, + new LinkedHashMap<>( + ImmutableMap.of( "a", - "z", - null - ), - "count", - new ColumnAnalysis( - null, - ColumnType.LONG.asTypeString(), - false, - true, - 1234, - 26, - "a", - "z", - null + new ColumnAnalysis( + null, + ColumnType.STRING.asTypeString(), + false, + true, + 1234, + 26, + "a", + "z", + null + ), + "count", + new ColumnAnalysis( + null, + ColumnType.LONG.asTypeString(), + false, + true, + 1234, + 26, + "a", + "z", + null + ) ) ), 1234, diff --git a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java index 
59a8d0b3e15..3219d3f87d6 100644 --- a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java +++ b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java @@ -123,13 +123,13 @@ public class SqlResourceTest extends CalciteTestBase private static final String DUMMY_SQL_QUERY_ID = "dummy"; private static final List EXPECTED_COLUMNS_FOR_RESULT_FORMAT_TESTS = - Arrays.asList("__time", "cnt", "dim1", "dim2", "dim3", "m1", "m2", "unique_dim1", "EXPR$8"); + Arrays.asList("__time", "dim1", "dim2", "dim3", "cnt", "m1", "m2", "unique_dim1", "EXPR$8"); private static final List EXPECTED_TYPES_FOR_RESULT_FORMAT_TESTS = - Arrays.asList("LONG", "LONG", "STRING", "STRING", "STRING", "FLOAT", "DOUBLE", "COMPLEX", "STRING"); + Arrays.asList("LONG", "STRING", "STRING", "STRING", "LONG", "FLOAT", "DOUBLE", "COMPLEX", "STRING"); private static final List EXPECTED_SQL_TYPES_FOR_RESULT_FORMAT_TESTS = - Arrays.asList("TIMESTAMP", "BIGINT", "VARCHAR", "VARCHAR", "VARCHAR", "FLOAT", "DOUBLE", "OTHER", "VARCHAR"); + Arrays.asList("TIMESTAMP", "VARCHAR", "VARCHAR", "VARCHAR", "BIGINT", "FLOAT", "DOUBLE", "OTHER", "VARCHAR"); private static QueryRunnerFactoryConglomerate conglomerate; private static Closer resourceCloser; @@ -544,10 +544,10 @@ public class SqlResourceTest extends CalciteTestBase ImmutableList.of( Arrays.asList( "2000-01-01T00:00:00.000Z", - 1, "", "a", "[\"a\",\"b\"]", + 1, 1.0, 1.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -555,10 +555,10 @@ public class SqlResourceTest extends CalciteTestBase ), Arrays.asList( "2000-01-02T00:00:00.000Z", - 1, "10.1", nullStr, "[\"b\",\"c\"]", + 1, 2.0, 2.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -655,10 +655,10 @@ public class SqlResourceTest extends CalciteTestBase EXPECTED_SQL_TYPES_FOR_RESULT_FORMAT_TESTS, Arrays.asList( "2000-01-01T00:00:00.000Z", - 1, "", "a", "[\"a\",\"b\"]", + 1, 1.0, 1.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -666,10 +666,10 @@ public class SqlResourceTest extends CalciteTestBase ), Arrays.asList( "2000-01-02T00:00:00.000Z", - 1, "10.1", nullStr, "[\"b\",\"c\"]", + 1, 2.0, 2.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -723,10 +723,10 @@ public class SqlResourceTest extends CalciteTestBase Assert.assertEquals( Arrays.asList( "2000-01-01T00:00:00.000Z", - 1, "", "a", "[\"a\",\"b\"]", + 1, 1.0, 1.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -737,10 +737,10 @@ public class SqlResourceTest extends CalciteTestBase Assert.assertEquals( Arrays.asList( "2000-01-02T00:00:00.000Z", - 1, "10.1", nullStr, "[\"b\",\"c\"]", + 1, 2.0, 2.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -771,10 +771,10 @@ public class SqlResourceTest extends CalciteTestBase Assert.assertEquals( Arrays.asList( "2000-01-01T00:00:00.000Z", - 1, "", "a", "[\"a\",\"b\"]", + 1, 1.0, 1.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -785,10 +785,10 @@ public class SqlResourceTest extends CalciteTestBase Assert.assertEquals( Arrays.asList( "2000-01-02T00:00:00.000Z", - 1, "10.1", nullStr, "[\"b\",\"c\"]", + 1, 2.0, 2.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -1099,8 +1099,8 @@ public class SqlResourceTest extends CalciteTestBase Assert.assertEquals( ImmutableList.of( - "2000-01-01T00:00:00.000Z,1,,a,\"[\"\"a\"\",\"\"b\"\"]\",1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", - "2000-01-02T00:00:00.000Z,1,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", + 
"2000-01-01T00:00:00.000Z,,a,\"[\"\"a\"\",\"\"b\"\"]\",1,1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", + "2000-01-02T00:00:00.000Z,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",1,2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", "", "" ), @@ -1124,8 +1124,8 @@ public class SqlResourceTest extends CalciteTestBase String.join(",", EXPECTED_COLUMNS_FOR_RESULT_FORMAT_TESTS), String.join(",", EXPECTED_TYPES_FOR_RESULT_FORMAT_TESTS), String.join(",", EXPECTED_SQL_TYPES_FOR_RESULT_FORMAT_TESTS), - "2000-01-01T00:00:00.000Z,1,,a,\"[\"\"a\"\",\"\"b\"\"]\",1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", - "2000-01-02T00:00:00.000Z,1,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", + "2000-01-01T00:00:00.000Z,,a,\"[\"\"a\"\",\"\"b\"\"]\",1,1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", + "2000-01-02T00:00:00.000Z,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",1,2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", "", "" ),