Return `RESOURCES` in `EXPLAIN PLAN` as an ordered collection (#14323)

* Make resources an ordered collection so it's deterministic.

* Test cleanup.

* Fix up docs.

* Replace deprecated ObjectNode#put() calls with ObjectNode#set().
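
For reviewers, a standalone sketch of the ordering problem the first bullet fixes. `Resource` here is a hypothetical stand-in for Druid's own class, but the mechanics are the same: a hash-based `Set` serializes in an unspecified order, while sorting by name pins it down (Java 16+ assumed for records):

```java
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ResourceOrderingSketch
{
  // Hypothetical stand-in for Druid's Resource class.
  record Resource(String name, String type) {}

  public static void main(String[] args)
  {
    // HashSet iteration order is unspecified, so serializing it directly
    // made the RESOURCES column of EXPLAIN PLAN vary between runs.
    Set<Resource> unordered = new HashSet<>();
    unordered.add(new Resource("foo", "DATASOURCE"));
    unordered.add(new Resource("dst", "DATASOURCE"));

    // The fix: sort by resource name before serializing.
    List<Resource> ordered = unordered.stream()
                                      .sorted(Comparator.comparing(Resource::name))
                                      .toList();

    // Always prints dst before foo, regardless of hash order.
    System.out.println(ordered);
  }
}
```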
Abhishek Radhakrishnan 2023-05-22 22:55:00 -07:00 committed by GitHub
parent a5e04d95a4
commit 338bdb35ea
4 changed files with 126 additions and 21 deletions


@@ -66,7 +66,7 @@ The [EXPLAIN PLAN](sql.md#explain-plan) functionality can help you understand how queries will
 be translated to native.
 EXPLAIN PLAN statements return:
 - a `PLAN` column that contains a JSON array of native queries that Druid will run
-- a `RESOURCES` column that describes the resource being queried as well as a `PLAN` column that contains a JSON array of native queries that Druid will run
+- a `RESOURCES` column that describes the resources used in the query
 - an `ATTRIBUTES` column that describes the attributes of a query, such as the statement type and target data source
 For example, consider the following query:
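
The doc's own example query sits outside this hunk. As a stand-in, here is a hedged sketch of reading all three columns over Druid's Avatica JDBC endpoint; the router URL and the `wikipedia` datasource are assumptions for illustration, and the Avatica client driver must be on the classpath:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ExplainPlanSketch
{
  public static void main(String[] args) throws Exception
  {
    // Assumed local router URL; adjust for your deployment.
    String url = "jdbc:avatica:remote:url=http://localhost:8888/druid/v2/sql/avatica/";
    try (Connection conn = DriverManager.getConnection(url);
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery("EXPLAIN PLAN FOR SELECT COUNT(*) FROM wikipedia")) {
      while (rs.next()) {
        // The three columns described above.
        System.out.println("PLAN:       " + rs.getString("PLAN"));
        System.out.println("RESOURCES:  " + rs.getString("RESOURCES"));
        System.out.println("ATTRIBUTES: " + rs.getString("ATTRIBUTES"));
      }
    }
  }
}
```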


@@ -75,6 +75,7 @@ import org.apache.druid.utils.Throwables;
 import javax.annotation.Nullable;
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
@@ -376,8 +377,11 @@ public abstract class QueryHandler extends SqlStatementHandler.BaseStatementHandler
           }
         }
       }
-      final Set<Resource> resources =
-          plannerContext.getResourceActions().stream().map(ResourceAction::getResource).collect(Collectors.toSet());
+      final List<Resource> resources = plannerContext.getResourceActions()
+                                                     .stream()
+                                                     .map(ResourceAction::getResource)
+                                                     .sorted(Comparator.comparing(Resource::getName))
+                                                     .collect(Collectors.toList());
       resourcesString = plannerContext.getJsonMapper().writeValueAsString(resources);
     }
     catch (JsonProcessingException jpe) {
@@ -431,9 +435,9 @@ public abstract class QueryHandler extends SqlStatementHandler.BaseStatementHandler
     for (DruidQuery druidQuery : druidQueryList) {
       Query<?> nativeQuery = druidQuery.getQuery();
       ObjectNode objectNode = jsonMapper.createObjectNode();
-      objectNode.put("query", jsonMapper.convertValue(nativeQuery, ObjectNode.class));
-      objectNode.put("signature", jsonMapper.convertValue(druidQuery.getOutputRowSignature(), ArrayNode.class));
-      objectNode.put(
+      objectNode.set("query", jsonMapper.convertValue(nativeQuery, ObjectNode.class));
+      objectNode.set("signature", jsonMapper.convertValue(druidQuery.getOutputRowSignature(), ArrayNode.class));
+      objectNode.set(
           "columnMappings",
           jsonMapper.convertValue(QueryUtils.buildColumnMappings(relRoot.fields, druidQuery), ArrayNode.class));
       nativeQueriesArrayNode.add(objectNode);
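
Context for this hunk: in jackson-databind, `ObjectNode.put(String, JsonNode)` is deprecated in favor of `ObjectNode.set(String, JsonNode)`; the scalar `put` overloads (e.g. `put(String, int)`) are unaffected. A minimal sketch, independent of Druid:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class ObjectNodeSetSketch
{
  public static void main(String[] args)
  {
    ObjectMapper mapper = new ObjectMapper();

    ObjectNode child = mapper.createObjectNode();
    child.put("name", "foo"); // scalar put(...) overloads remain non-deprecated

    ObjectNode parent = mapper.createObjectNode();
    parent.set("query", child); // set(...) replaces the deprecated put(String, JsonNode)

    System.out.println(parent); // {"query":{"name":"foo"}}
  }
}
```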


@@ -856,6 +856,11 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
     // Skip vectorization since otherwise the "context" will change for each subtest.
     skipVectorize();
+    final String query = StringUtils.format(
+        "EXPLAIN PLAN FOR INSERT INTO dst SELECT * FROM %s PARTITIONED BY ALL TIME",
+        externSql(externalDataSource)
+    );
+
     ObjectMapper queryJsonMapper = queryFramework().queryJsonMapper();
     final ScanQuery expectedQuery = newScanQueryBuilder()
         .dataSource(externalDataSource)
@@ -896,10 +901,7 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
         PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
         ImmutableMap.of("sqlQueryId", "dummy"),
         Collections.emptyList(),
-        StringUtils.format(
-            "EXPLAIN PLAN FOR INSERT INTO dst SELECT * FROM %s PARTITIONED BY ALL TIME",
-            externSql(externalDataSource)
-        ),
+        query,
         CalciteTests.SUPER_USER_AUTH_RESULT,
         ImmutableList.of(),
         new DefaultResultsVerifier(
@@ -920,10 +922,110 @@ public class CalciteInsertDmlTest extends CalciteIngestionDmlTest
         PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN,
         ImmutableMap.of("sqlQueryId", "dummy"),
         Collections.emptyList(),
-        StringUtils.format(
-            "EXPLAIN PLAN FOR INSERT INTO dst SELECT * FROM %s PARTITIONED BY ALL TIME",
-            externSql(externalDataSource)
-        ),
+        query,
+        CalciteTests.SUPER_USER_AUTH_RESULT,
+        ImmutableList.of(),
+        new DefaultResultsVerifier(
+            ImmutableList.of(
+                new Object[]{
+                    explanation,
+                    resources,
+                    attributes
+                }
+            ),
+            null
+        ),
+        null
+    );
+
+    // Not using testIngestionQuery, so must set didTest manually to satisfy the check in tearDown.
+    didTest = true;
+  }
+
+  @Test
+  public void testExplainPlanForInsertWithClusteredBy() throws JsonProcessingException
+  {
+    skipVectorize();
+
+    final String query = "EXPLAIN PLAN FOR INSERT INTO druid.dst "
+                         + "SELECT __time, FLOOR(m1) as floor_m1, dim1, CEIL(m2) as ceil_m2 FROM foo "
+                         + "PARTITIONED BY FLOOR(__time TO DAY) CLUSTERED BY 2, dim1 DESC, CEIL(m2)";
+    ObjectMapper queryJsonMapper = queryFramework().queryJsonMapper();
+    final ScanQuery expectedQuery = newScanQueryBuilder()
+        .dataSource("foo")
+        .intervals(querySegmentSpec(Filtration.eternity()))
+        .columns("__time", "dim1", "v0", "v1")
+        .virtualColumns(
+            expressionVirtualColumn("v0", "floor(\"m1\")", ColumnType.FLOAT),
+            expressionVirtualColumn("v1", "ceil(\"m2\")", ColumnType.DOUBLE)
+        )
+        .orderBy(
+            ImmutableList.of(
+                new ScanQuery.OrderBy("v0", ScanQuery.Order.ASCENDING),
+                new ScanQuery.OrderBy("dim1", ScanQuery.Order.DESCENDING),
+                new ScanQuery.OrderBy("v1", ScanQuery.Order.ASCENDING)
+            )
+        )
+        .context(
+            queryJsonMapper.readValue(
+                "{\"sqlInsertSegmentGranularity\":\"\\\"DAY\\\"\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}",
+                JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
+            )
+        )
+        .build();
+
+    final String legacyExplanation =
+        "DruidQueryRel(query=["
+        + queryJsonMapper.writeValueAsString(expectedQuery)
+        + "], signature=[{__time:LONG, v0:FLOAT, dim1:STRING, v1:DOUBLE}])\n";
+    final String explanation =
+        "["
+        + "{\"query\":{\"queryType\":\"scan\","
+        + "\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},"
+        + "\"virtualColumns\":[{\"type\":\"expression\",\"name\":\"v0\",\"expression\":\"floor(\\\"m1\\\")\",\"outputType\":\"FLOAT\"},"
+        + "{\"type\":\"expression\",\"name\":\"v1\",\"expression\":\"ceil(\\\"m2\\\")\",\"outputType\":\"DOUBLE\"}],"
+        + "\"resultFormat\":\"compactedList\","
+        + "\"orderBy\":[{\"columnName\":\"v0\",\"order\":\"ascending\"},{\"columnName\":\"dim1\",\"order\":\"descending\"},"
+        + "{\"columnName\":\"v1\",\"order\":\"ascending\"}],\"columns\":[\"__time\",\"dim1\",\"v0\",\"v1\"],\"legacy\":false,"
+        + "\"context\":{\"sqlInsertSegmentGranularity\":\"\\\"DAY\\\"\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}},"
+        + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"v0\",\"type\":\"FLOAT\"},{\"name\":\"dim1\",\"type\":\"STRING\"},"
+        + "{\"name\":\"v1\",\"type\":\"DOUBLE\"}],"
+        + "\"columnMappings\":[{\"queryColumn\":\"__time\",\"outputColumn\":\"__time\"},{\"queryColumn\":\"v0\",\"outputColumn\":\"floor_m1\"},"
+        + "{\"queryColumn\":\"dim1\",\"outputColumn\":\"dim1\"},{\"queryColumn\":\"v1\",\"outputColumn\":\"ceil_m2\"}]"
+        + "}]";
+    final String resources = "[{\"name\":\"dst\",\"type\":\"DATASOURCE\"},{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
+    final String attributes = "{\"statementType\":\"INSERT\",\"targetDataSource\":\"druid.dst\"}";
+
+    // Use testQuery for EXPLAIN (not testIngestionQuery).
+    testQuery(
+        PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
+        ImmutableMap.of("sqlQueryId", "dummy"),
+        Collections.emptyList(),
+        query,
+        CalciteTests.SUPER_USER_AUTH_RESULT,
+        ImmutableList.of(),
+        new DefaultResultsVerifier(
+            ImmutableList.of(
+                new Object[]{
+                    legacyExplanation,
+                    resources,
+                    attributes
+                }
+            ),
+            null
+        ),
+        null
+    );
+    testQuery(
+        PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN,
+        ImmutableMap.of("sqlQueryId", "dummy"),
+        Collections.emptyList(),
+        query,
         CalciteTests.SUPER_USER_AUTH_RESULT,
         ImmutableList.of(),
         new DefaultResultsVerifier(
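
The `resources` fixture in these tests is just the name-sorted list serialized by Jackson. A hedged sanity check, reusing the hypothetical `Resource` record from the earlier sketch (jackson-databind 2.12+ assumed for record support):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;

public class ResourcesFixtureSketch
{
  // Hypothetical stand-in; Jackson writes record components in declaration order.
  record Resource(String name, String type) {}

  public static void main(String[] args) throws Exception
  {
    List<Resource> ordered = List.of(
        new Resource("dst", "DATASOURCE"),
        new Resource("foo", "DATASOURCE")
    );
    // Prints [{"name":"dst","type":"DATASOURCE"},{"name":"foo","type":"DATASOURCE"}],
    // matching the expected RESOURCES value asserted above.
    System.out.println(new ObjectMapper().writeValueAsString(ordered));
  }
}
```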


@@ -600,6 +600,11 @@ public class CalciteReplaceDmlTest extends CalciteIngestionDmlTest
     // Skip vectorization since otherwise the "context" will change for each subtest.
     skipVectorize();
+    final String query = StringUtils.format(
+        "EXPLAIN PLAN FOR REPLACE INTO dst OVERWRITE ALL SELECT * FROM %s PARTITIONED BY ALL TIME",
+        externSql(externalDataSource)
+    );
+
     ObjectMapper queryJsonMapper = queryFramework().queryJsonMapper();
     final ScanQuery expectedQuery = newScanQueryBuilder()
         .dataSource(externalDataSource)
@@ -638,10 +643,7 @@ public class CalciteReplaceDmlTest extends CalciteIngestionDmlTest
         PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
         ImmutableMap.of("sqlQueryId", "dummy"),
         Collections.emptyList(),
-        StringUtils.format(
-            "EXPLAIN PLAN FOR REPLACE INTO dst OVERWRITE ALL SELECT * FROM %s PARTITIONED BY ALL TIME",
-            externSql(externalDataSource)
-        ),
+        query,
         CalciteTests.SUPER_USER_AUTH_RESULT,
         ImmutableList.of(),
         new DefaultResultsVerifier(
@@ -661,10 +663,7 @@ public class CalciteReplaceDmlTest extends CalciteIngestionDmlTest
         PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN,
         ImmutableMap.of("sqlQueryId", "dummy"),
         Collections.emptyList(),
-        StringUtils.format(
-            "EXPLAIN PLAN FOR REPLACE INTO dst OVERWRITE ALL SELECT * FROM %s PARTITIONED BY ALL TIME",
-            externSql(externalDataSource)
-        ),
+        query,
         CalciteTests.SUPER_USER_AUTH_RESULT,
         ImmutableList.of(),
         new DefaultResultsVerifier(