From 338bdb35ea19b495bc6eb587ab99978edb16ce46 Mon Sep 17 00:00:00 2001
From: Abhishek Radhakrishnan
Date: Mon, 22 May 2023 22:55:00 -0700
Subject: [PATCH] Return `RESOURCES` in `EXPLAIN PLAN` as an ordered collection
 (#14323)

* Make resources an ordered collection so it's deterministic.

* test cleanup

* fixup docs.

* Replace deprecated ObjectNode#put() calls with ObjectNode#set().
---
 docs/querying/sql-translation.md              |   2 +-
 .../sql/calcite/planner/QueryHandler.java     |  14 ++-
 .../sql/calcite/CalciteInsertDmlTest.java     | 116 ++++++++++++++++--
 .../sql/calcite/CalciteReplaceDmlTest.java    |  15 ++-
 4 files changed, 126 insertions(+), 21 deletions(-)

diff --git a/docs/querying/sql-translation.md b/docs/querying/sql-translation.md
index 4b0b2d8fbc8..7c2876c68d3 100644
--- a/docs/querying/sql-translation.md
+++ b/docs/querying/sql-translation.md
@@ -66,7 +66,7 @@ The [EXPLAIN PLAN](sql.md#explain-plan) functionality can help you understand ho
 be translated to native.
 EXPLAIN PLAN statements return:
 - a `PLAN` column that contains a JSON array of native queries that Druid will run
-- a `RESOURCES` column that describes the resource being queried as well as a `PLAN` column that contains a JSON array of native queries that Druid will run
+- a `RESOURCES` column that describes the resources used in the query
 - a `ATTRIBUTES` column that describes the attributes of a query, such as the statement type and target data source
 
 For example, consider the following query:
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java
index 5dd94f24730..28e9beffc68 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java
@@ -75,6 +75,7 @@ import org.apache.druid.utils.Throwables;
 
 import javax.annotation.Nullable;
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -376,8 +377,11 @@ public abstract class QueryHandler extends SqlStatementHandler.BaseStatementHand
           }
         }
       }
-      final Set<Resource> resources =
-          plannerContext.getResourceActions().stream().map(ResourceAction::getResource).collect(Collectors.toSet());
+      final List<Resource> resources = plannerContext.getResourceActions()
+                                                     .stream()
+                                                     .map(ResourceAction::getResource)
+                                                     .sorted(Comparator.comparing(Resource::getName))
+                                                     .collect(Collectors.toList());
       resourcesString = plannerContext.getJsonMapper().writeValueAsString(resources);
     }
     catch (JsonProcessingException jpe) {
@@ -431,9 +435,9 @@ public abstract class QueryHandler extends SqlStatementHandler.BaseStatementHand
     for (DruidQuery druidQuery : druidQueryList) {
       Query<?> nativeQuery = druidQuery.getQuery();
       ObjectNode objectNode = jsonMapper.createObjectNode();
-      objectNode.put("query", jsonMapper.convertValue(nativeQuery, ObjectNode.class));
-      objectNode.put("signature", jsonMapper.convertValue(druidQuery.getOutputRowSignature(), ArrayNode.class));
-      objectNode.put(
+      objectNode.set("query", jsonMapper.convertValue(nativeQuery, ObjectNode.class));
+      objectNode.set("signature", jsonMapper.convertValue(druidQuery.getOutputRowSignature(), ArrayNode.class));
+      objectNode.set(
           "columnMappings",
           jsonMapper.convertValue(QueryUtils.buildColumnMappings(relRoot.fields, druidQuery), ArrayNode.class));
       nativeQueriesArrayNode.add(objectNode);
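
Background sketch (illustrative only, not part of the patch): the QueryHandler change above sorts the resources by name before serializing them, which is what makes the RESOURCES column of EXPLAIN PLAN deterministic. The following self-contained Java example shows the same idea outside of Druid; ResourceOrderingSketch and its map-based stand-in for Druid's Resource type are hypothetical names, and the only assumption is that jackson-databind is on the classpath.

import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class ResourceOrderingSketch
{
  public static void main(String[] args) throws Exception
  {
    // A plain Set has no defined iteration order, so serializing it directly can yield
    // [foo, dst] on one run and [dst, foo] on another.
    Set<Map<String, String>> resources = Set.of(
        resource("foo", "DATASOURCE"),
        resource("dst", "DATASOURCE")
    );

    // Sorting by name first pins the order, mirroring Comparator.comparing(Resource::getName) above.
    List<Map<String, String>> ordered = resources.stream()
        .sorted(Comparator.comparing((Map<String, String> r) -> r.get("name")))
        .collect(Collectors.toList());

    // Prints [{"name":"dst","type":"DATASOURCE"},{"name":"foo","type":"DATASOURCE"}]
    System.out.println(new ObjectMapper().writeValueAsString(ordered));
  }

  private static Map<String, String> resource(String name, String type)
  {
    Map<String, String> r = new LinkedHashMap<>();
    r.put("name", name);
    r.put("type", type);
    return r;
  }
}

The printed value has the same shape and ordering as the `resources` string asserted in the new CalciteInsertDmlTest test below.
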
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java
index 51fa0176700..d0f06fffa69 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java
@@ -856,6 +856,11 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
     // Skip vectorization since otherwise the "context" will change for each subtest.
     skipVectorize();
 
+    final String query = StringUtils.format(
+        "EXPLAIN PLAN FOR INSERT INTO dst SELECT * FROM %s PARTITIONED BY ALL TIME",
+        externSql(externalDataSource)
+    );
+
     ObjectMapper queryJsonMapper = queryFramework().queryJsonMapper();
     final ScanQuery expectedQuery = newScanQueryBuilder()
         .dataSource(externalDataSource)
@@ -896,10 +901,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
         PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
         ImmutableMap.of("sqlQueryId", "dummy"),
         Collections.emptyList(),
-        StringUtils.format(
-            "EXPLAIN PLAN FOR INSERT INTO dst SELECT * FROM %s PARTITIONED BY ALL TIME",
-            externSql(externalDataSource)
-        ),
+        query,
         CalciteTests.SUPER_USER_AUTH_RESULT,
         ImmutableList.of(),
         new DefaultResultsVerifier(
@@ -920,10 +922,110 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
         PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN,
         ImmutableMap.of("sqlQueryId", "dummy"),
         Collections.emptyList(),
-        StringUtils.format(
-            "EXPLAIN PLAN FOR INSERT INTO dst SELECT * FROM %s PARTITIONED BY ALL TIME",
-            externSql(externalDataSource)
+        query,
+        CalciteTests.SUPER_USER_AUTH_RESULT,
+        ImmutableList.of(),
+        new DefaultResultsVerifier(
+            ImmutableList.of(
+                new Object[]{
+                    explanation,
+                    resources,
+                    attributes
+                }
+            ),
+            null
         ),
+        null
+    );
+
+    // Not using testIngestionQuery, so must set didTest manually to satisfy the check in tearDown.
+    didTest = true;
+  }
+
+  @Test
+  public void testExplainPlanForInsertWithClusteredBy() throws JsonProcessingException
+  {
+    skipVectorize();
+
+    final String query = "EXPLAIN PLAN FOR INSERT INTO druid.dst "
+                         + "SELECT __time, FLOOR(m1) as floor_m1, dim1, CEIL(m2) as ceil_m2 FROM foo "
+                         + "PARTITIONED BY FLOOR(__time TO DAY) CLUSTERED BY 2, dim1 DESC, CEIL(m2)";
+
+    ObjectMapper queryJsonMapper = queryFramework().queryJsonMapper();
+    final ScanQuery expectedQuery = newScanQueryBuilder()
+        .dataSource("foo")
+        .intervals(querySegmentSpec(Filtration.eternity()))
+        .columns("__time", "dim1", "v0", "v1")
+        .virtualColumns(
+            expressionVirtualColumn("v0", "floor(\"m1\")", ColumnType.FLOAT),
+            expressionVirtualColumn("v1", "ceil(\"m2\")", ColumnType.DOUBLE)
+        )
+        .orderBy(
+            ImmutableList.of(
+                new ScanQuery.OrderBy("v0", ScanQuery.Order.ASCENDING),
+                new ScanQuery.OrderBy("dim1", ScanQuery.Order.DESCENDING),
+                new ScanQuery.OrderBy("v1", ScanQuery.Order.ASCENDING)
+            )
+        )
+        .context(
+            queryJsonMapper.readValue(
+                "{\"sqlInsertSegmentGranularity\":\"\\\"DAY\\\"\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}",
+                JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
+            )
+        )
+        .build();
+
+
+    final String legacyExplanation =
+        "DruidQueryRel(query=["
+        + queryJsonMapper.writeValueAsString(expectedQuery)
+        + "], signature=[{__time:LONG, v0:FLOAT, dim1:STRING, v1:DOUBLE}])\n";
+
+    final String explanation =
+        "["
+        + "{\"query\":{\"queryType\":\"scan\","
+        + "\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},"
+        + "\"virtualColumns\":[{\"type\":\"expression\",\"name\":\"v0\",\"expression\":\"floor(\\\"m1\\\")\",\"outputType\":\"FLOAT\"},"
+        + "{\"type\":\"expression\",\"name\":\"v1\",\"expression\":\"ceil(\\\"m2\\\")\",\"outputType\":\"DOUBLE\"}],"
+        + "\"resultFormat\":\"compactedList\","
+        + "\"orderBy\":[{\"columnName\":\"v0\",\"order\":\"ascending\"},{\"columnName\":\"dim1\",\"order\":\"descending\"},"
+        + "{\"columnName\":\"v1\",\"order\":\"ascending\"}],\"columns\":[\"__time\",\"dim1\",\"v0\",\"v1\"],\"legacy\":false,"
+        + "\"context\":{\"sqlInsertSegmentGranularity\":\"\\\"DAY\\\"\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}},"
+        + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"v0\",\"type\":\"FLOAT\"},{\"name\":\"dim1\",\"type\":\"STRING\"},"
+        + "{\"name\":\"v1\",\"type\":\"DOUBLE\"}],"
+        + "\"columnMappings\":[{\"queryColumn\":\"__time\",\"outputColumn\":\"__time\"},{\"queryColumn\":\"v0\",\"outputColumn\":\"floor_m1\"},"
+        + "{\"queryColumn\":\"dim1\",\"outputColumn\":\"dim1\"},{\"queryColumn\":\"v1\",\"outputColumn\":\"ceil_m2\"}]"
+        + "}]";
+
+    final String resources = "[{\"name\":\"dst\",\"type\":\"DATASOURCE\"},{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
+    final String attributes = "{\"statementType\":\"INSERT\",\"targetDataSource\":\"druid.dst\"}";
+
+    // Use testQuery for EXPLAIN (not testIngestionQuery).
+    testQuery(
+        PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
+        ImmutableMap.of("sqlQueryId", "dummy"),
+        Collections.emptyList(),
+        query,
+        CalciteTests.SUPER_USER_AUTH_RESULT,
+        ImmutableList.of(),
+        new DefaultResultsVerifier(
+            ImmutableList.of(
+                new Object[]{
+                    legacyExplanation,
+                    resources,
+                    attributes
+                }
+            ),
+            null
+        ),
+        null
+    );
+
+    testQuery(
+        PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN,
+        ImmutableMap.of("sqlQueryId", "dummy"),
+        Collections.emptyList(),
+        query,
         CalciteTests.SUPER_USER_AUTH_RESULT,
         ImmutableList.of(),
         new DefaultResultsVerifier(
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java
index 4e583b4f787..2192fe246c7 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java
@@ -600,6 +600,11 @@ public class CalciteReplaceDmlTest extends CalciteIngestionDmlTest
     // Skip vectorization since otherwise the "context" will change for each subtest.
     skipVectorize();
 
+    final String query = StringUtils.format(
+        "EXPLAIN PLAN FOR REPLACE INTO dst OVERWRITE ALL SELECT * FROM %s PARTITIONED BY ALL TIME",
+        externSql(externalDataSource)
+    );
+
     ObjectMapper queryJsonMapper = queryFramework().queryJsonMapper();
     final ScanQuery expectedQuery = newScanQueryBuilder()
         .dataSource(externalDataSource)
@@ -638,10 +643,7 @@ public class CalciteReplaceDmlTest extends CalciteIngestionDmlTest
         PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
         ImmutableMap.of("sqlQueryId", "dummy"),
         Collections.emptyList(),
-        StringUtils.format(
-            "EXPLAIN PLAN FOR REPLACE INTO dst OVERWRITE ALL SELECT * FROM %s PARTITIONED BY ALL TIME",
-            externSql(externalDataSource)
-        ),
+        query,
         CalciteTests.SUPER_USER_AUTH_RESULT,
         ImmutableList.of(),
         new DefaultResultsVerifier(
@@ -661,10 +663,7 @@ public class CalciteReplaceDmlTest extends CalciteIngestionDmlTest
         PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN,
         ImmutableMap.of("sqlQueryId", "dummy"),
         Collections.emptyList(),
-        StringUtils.format(
-            "EXPLAIN PLAN FOR REPLACE INTO dst OVERWRITE ALL SELECT * FROM %s PARTITIONED BY ALL TIME",
-            externSql(externalDataSource)
-        ),
+        query,
         CalciteTests.SUPER_USER_AUTH_RESULT,
         ImmutableList.of(),
         new DefaultResultsVerifier(
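
Side note on the last commit-message bullet (illustrative only, not part of the patch): Jackson deprecated ObjectNode#put(String, JsonNode) in favor of ObjectNode#set(String, JsonNode), which is why the QueryHandler hunk above switches the three calls that attach child nodes while leaving scalar put() calls alone. A minimal sketch of the distinction, assuming only jackson-databind on the classpath (PlanEntrySketch is a hypothetical name, not Druid code):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class PlanEntrySketch
{
  public static void main(String[] args) throws Exception
  {
    ObjectMapper jsonMapper = new ObjectMapper();

    ObjectNode queryNode = jsonMapper.createObjectNode();
    queryNode.put("queryType", "scan");   // put() remains the right call for scalar values
    queryNode.put("dataSource", "foo");

    ArrayNode signature = jsonMapper.createArrayNode();
    signature.add(jsonMapper.createObjectNode().put("name", "__time").put("type", "LONG"));

    // For JsonNode children, set() is the supported replacement for the deprecated put(String, JsonNode).
    ObjectNode planEntry = jsonMapper.createObjectNode();
    planEntry.set("query", queryNode);
    planEntry.set("signature", signature);

    // Prints {"query":{"queryType":"scan","dataSource":"foo"},"signature":[{"name":"__time","type":"LONG"}]}
    System.out.println(jsonMapper.writeValueAsString(planEntry));
  }
}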