From 80db8cd93b0716ef94af0d9218fb07591fdcb251 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 21 May 2024 09:47:05 +0200
Subject: [PATCH 1/3] Bump org.openrewrite.maven:rewrite-maven-plugin from
 5.27.0 to 5.31.0 (#16477)

---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 27ceb04cf68..f4dd5f0d451 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1371,7 +1371,7 @@
         <plugin>
           <groupId>org.openrewrite.maven</groupId>
           <artifactId>rewrite-maven-plugin</artifactId>
-          <version>5.27.0</version>
+          <version>5.31.0</version>
           <configuration>
             <activeRecipes>
               <recipe>org.apache.druid.RewriteRules</recipe>

From 0ab3b341171753afae3d66ebbac0dad821949fdf Mon Sep 17 00:00:00 2001
From: Vadim Ogievetsky
Date: Wed, 22 May 2024 08:33:07 -0700
Subject: [PATCH 2/3] Web console: enable copy data as inline SQL (via VALUES)
 (#16458)

* copy as values
* address NULL issue
* add description
* extend test
* fix json
* more types
* fix braces with nulls
* fix test
* update functions to scan
---
 web-console/script/create-sql-docs.js         |   5 +-
 web-console/script/druid                      |   7 +-
 web-console/src/utils/download.ts             |  54 ++++----
 web-console/src/utils/index.tsx               |   2 +-
 web-console/src/utils/sample-query.tsx        |  82 ------------
 web-console/src/utils/table-helpers.ts        |   6 +-
 web-console/src/utils/types.ts                |  19 ++-
 ...e-query.spec.tsx => values-query.spec.tsx} |  35 +++--
 web-console/src/utils/values-query.tsx        | 121 ++++++++++++++++++
 .../schema-step/schema-step.tsx               |   4 +-
 .../execution-summary-panel.tsx               |  27 ++--
 .../result-table-pane/result-table-pane.tsx   |   2 +-
 12 files changed, 219 insertions(+), 145 deletions(-)
 delete mode 100644 web-console/src/utils/sample-query.tsx
 rename web-console/src/utils/{sample-query.spec.tsx => values-query.spec.tsx} (57%)
 create mode 100644 web-console/src/utils/values-query.tsx
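For orientation before the file-by-file changes: the new "copy as inline SQL" action serializes a result grid into one self-contained SELECT over a VALUES list, so the data can be re-queried without any datasource. A minimal sketch of the intended call path, assuming the utilities introduced below; the import path and sample data are illustrative, while `QueryResult.fromRawResult` is called with the same argument shape as in the spec later in this patch:

    import { QueryResult } from '@druid-toolkit/query';

    // Illustrative import; in the web-console tree this lives in src/utils/download.ts.
    import { copyQueryResultsToClipboard } from './download';

    // Raw result shape used throughout this patch: a name row, a native type row,
    // a SQL type row, then the data rows.
    const queryResult = QueryResult.fromRawResult(
      [
        ['host', 'cnt'],
        ['STRING', 'LONG'],
        ['VARCHAR', 'BIGINT'],
        ['brokerA.internal', 42],
      ],
      false,
      true,
      true,
      true,
    );

    // 'sql' is the new Format member; the clipboard then holds roughly:
    //   SELECT CAST("c1" AS VARCHAR) AS "host", CAST("c2" AS BIGINT) AS "cnt"
    //   FROM (VALUES ('brokerA.internal', 42)) AS "t" ("c1", "c2")
    copyQueryResultsToClipboard(queryResult, 'sql');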
diff --git a/web-console/script/create-sql-docs.js b/web-console/script/create-sql-docs.js
index 82328fd74b5..86988837711 100755
--- a/web-console/script/create-sql-docs.js
+++ b/web-console/script/create-sql-docs.js
@@ -23,8 +23,8 @@ const snarkdown = require('snarkdown');
 
 const writefile = 'lib/sql-docs.js';
 
-const MINIMUM_EXPECTED_NUMBER_OF_FUNCTIONS = 167;
-const MINIMUM_EXPECTED_NUMBER_OF_DATA_TYPES = 14;
+const MINIMUM_EXPECTED_NUMBER_OF_FUNCTIONS = 198;
+const MINIMUM_EXPECTED_NUMBER_OF_DATA_TYPES = 15;
 
 const initialFunctionDocs = {
   TABLE: [['external', convertMarkdownToHtml('Defines a logical table from an external.')]],
@@ -78,6 +78,7 @@ const readDoc = async () => {
     await fs.readFile('../docs/querying/sql-array-functions.md', 'utf-8'),
     await fs.readFile('../docs/querying/sql-multivalue-string-functions.md', 'utf-8'),
     await fs.readFile('../docs/querying/sql-json-functions.md', 'utf-8'),
+    await fs.readFile('../docs/querying/sql-window-functions.md', 'utf-8'),
     await fs.readFile('../docs/querying/sql-operators.md', 'utf-8'),
   ].join('\n');
 
diff --git a/web-console/script/druid b/web-console/script/druid
index 69346d4a2b1..bff3cb490d9 100755
--- a/web-console/script/druid
+++ b/web-console/script/druid
@@ -62,9 +62,12 @@ function _build_distribution() {
     && cd apache-druid-$(_get_druid_version) \
     && mkdir -p extensions/druid-testing-tools \
     && cp "$(_get_code_root)/extensions-core/testing-tools/target/druid-testing-tools-$(_get_druid_version).jar" extensions/druid-testing-tools/ \
-    && echo -e "\n\ndruid.extensions.loadList=[\"druid-hdfs-storage\", \"druid-kafka-indexing-service\", \"druid-datasketches\", \"druid-multi-stage-query\", \"druid-testing-tools\"]" >> conf/druid/single-server/micro-quickstart/_common/common.runtime.properties \
+    && mkdir -p extensions/druid-compressed-bigdecimal \
+    && cp "$(_get_code_root)/extensions-contrib/compressed-bigdecimal/target/druid-compressed-bigdecimal-$(_get_druid_version).jar" extensions/druid-compressed-bigdecimal/ \
+    && echo -e "\n\ndruid.extensions.loadList=[\"druid-hdfs-storage\", \"druid-kafka-indexing-service\", \"druid-multi-stage-query\", \"druid-testing-tools\", \"druid-bloom-filter\", \"druid-datasketches\", \"druid-histogram\", \"druid-stats\", \"druid-compressed-bigdecimal\"]" >> conf/druid/single-server/micro-quickstart/_common/common.runtime.properties \
+    && echo -e "\n\ndruid.extensions.loadList=[\"druid-hdfs-storage\", \"druid-kafka-indexing-service\", \"druid-multi-stage-query\", \"druid-testing-tools\", \"druid-bloom-filter\", \"druid-datasketches\", \"druid-histogram\", \"druid-stats\", \"druid-compressed-bigdecimal\"]" >> conf/druid/auto/_common/common.runtime.properties \
     && echo -e "\n\ndruid.server.http.allowedHttpMethods=[\"HEAD\"]" >> conf/druid/single-server/micro-quickstart/_common/common.runtime.properties \
-    && echo -e "\n\ndruid.generic.useDefaultValueForNull=false" >> conf/druid/single-server/micro-quickstart/_common/common.runtime.properties \
+    && echo -e "\n\ndruid.server.http.allowedHttpMethods=[\"HEAD\"]" >> conf/druid/auto/_common/common.runtime.properties \
   )
 }
diff --git a/web-console/src/utils/download.ts b/web-console/src/utils/download.ts
index aebf5f14f63..aabe2fc1c66 100644
--- a/web-console/src/utils/download.ts
+++ b/web-console/src/utils/download.ts
@@ -21,6 +21,9 @@ import FileSaver from 'file-saver';
 import * as JSONBig from 'json-bigint-native';
 
 import { copyAndAlert, stringifyValue } from './general';
+import { queryResultToValuesQuery } from './values-query';
+
+export type Format = 'csv' | 'tsv' | 'json' | 'sql';
 
 export function downloadUrl(url: string, filename: string) {
   // Create a link and set the URL using `createObjectURL`
@@ -74,44 +77,43 @@ export function downloadFile(text: string, type: string, filename: string): void
   FileSaver.saveAs(blob, filename);
 }
 
-function queryResultsToString(queryResult: QueryResult, format: string): string {
-  let lines: string[] = [];
-  let separator = '';
+function queryResultsToString(queryResult: QueryResult, format: Format): string {
+  const { header, rows } = queryResult;
 
-  if (format === 'csv' || format === 'tsv') {
-    separator = format === 'csv' ? ',' : '\t';
-    lines.push(
-      queryResult.header.map(column => formatForFormat(column.name, format)).join(separator),
-    );
-    lines = lines.concat(
-      queryResult.rows.map(r => r.map(cell => formatForFormat(cell, format)).join(separator)),
-    );
-  } else {
-    // json
-    lines = queryResult.rows.map(r => {
-      const outputObject: Record<string, any> = {};
-      for (let k = 0; k < r.length; k++) {
-        const newName = queryResult.header[k];
-        if (newName) {
-          outputObject[newName.name] = r[k];
-        }
-      }
-      return JSONBig.stringify(outputObject);
-    });
+  switch (format) {
+    case 'csv':
+    case 'tsv': {
+      const separator = format === 'csv' ? ',' : '\t';
+      return [
+        header.map(column => formatForFormat(column.name, format)).join(separator),
+        ...rows.map(r => r.map(cell => formatForFormat(cell, format)).join(separator)),
+      ].join('\n');
+    }
+
+    case 'sql':
+      return queryResultToValuesQuery(queryResult).toString();
+
+    case 'json':
+      return queryResult
+        .toObjectArray()
+        .map(r => JSONBig.stringify(r))
+        .join('\n');
+
+    default:
+      throw new Error(`unknown format: ${format}`);
   }
-  return lines.join('\n');
 }
 
 export function downloadQueryResults(
   queryResult: QueryResult,
   filename: string,
-  format: string,
+  format: Format,
 ): void {
   const resultString: string = queryResultsToString(queryResult, format);
   downloadFile(resultString, format, filename);
 }
 
-export function copyQueryResultsToClipboard(queryResult: QueryResult, format: string): void {
+export function copyQueryResultsToClipboard(queryResult: QueryResult, format: Format): void {
   const resultString: string = queryResultsToString(queryResult, format);
   copyAndAlert(resultString, 'Query results copied to clipboard');
 }
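The point of the `Format` union above is compile-time narrowing: `queryResultsToString` now switches over a closed set of formats instead of branching on an open `string`. A self-contained sketch of the idiom, with a hypothetical `assertNever` helper that is not part of the patch:

    type Format = 'csv' | 'tsv' | 'json' | 'sql';

    // Hypothetical helper: if a new Format member is added and a case is
    // forgotten, the argument no longer narrows to never and compilation fails.
    function assertNever(x: never): never {
      throw new Error(`unexpected format: ${String(x)}`);
    }

    function mimeTypeFor(format: Format): string {
      switch (format) {
        case 'csv':
          return 'text/csv';
        case 'tsv':
          return 'text/tab-separated-values';
        case 'json':
          return 'application/json';
        case 'sql':
          return 'text/plain'; // inline SQL is just plain text
        default:
          return assertNever(format);
      }
    }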
diff --git a/web-console/src/utils/index.tsx b/web-console/src/utils/index.tsx
index 244eec5372c..4daeefe61c1 100644
--- a/web-console/src/utils/index.tsx
+++ b/web-console/src/utils/index.tsx
@@ -32,8 +32,8 @@ export * from './object-change';
 export * from './query-action';
 export * from './query-manager';
 export * from './query-state';
-export * from './sample-query';
 export * from './sanitizers';
 export * from './sql';
 export * from './table-helpers';
 export * from './types';
+export * from './values-query';
diff --git a/web-console/src/utils/sample-query.tsx b/web-console/src/utils/sample-query.tsx
deleted file mode 100644
index 2c32618fac1..00000000000
--- a/web-console/src/utils/sample-query.tsx
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import type { Column, LiteralValue, QueryResult, SqlExpression } from '@druid-toolkit/query';
-import {
-  C,
-  F,
-  L,
-  RefName,
-  SqlAlias,
-  SqlColumnList,
-  SqlQuery,
-  SqlRecord,
-  SqlValues,
-} from '@druid-toolkit/query';
-
-import { oneOf } from './general';
-
-const SAMPLE_ARRAY_SEPARATOR = '<#>'; // Note that this is a regexp so don't add anything that is a special regexp thing
-
-function nullForColumn(column: Column): LiteralValue {
-  return oneOf(column.sqlType, 'BIGINT', 'DOUBLE', 'FLOAT') ? 0 : '';
-}
-
-export function sampleDataToQuery(sample: QueryResult): SqlQuery {
-  const { header, rows } = sample;
-  return SqlQuery.create(
-    new SqlAlias({
-      expression: SqlValues.create(
-        rows.map(row =>
-          SqlRecord.create(
-            row.map((r, i) => {
-              if (header[i].nativeType === 'COMPLEX<json>') {
-                return L(JSON.stringify(r));
-              } else if (String(header[i].sqlType).endsWith(' ARRAY')) {
-                return L(r.join(SAMPLE_ARRAY_SEPARATOR));
-              } else if (r == null || typeof r === 'object') {
-                // Avoid actually using NULL literals as they create havoc in the VALUES type system and throw errors.
-                // Also, cleanup array if it happens to get here, it shouldn't.
-                return L(nullForColumn(header[i]));
-              } else {
-                return L(r);
-              }
-            }),
-          ),
-        ),
-      ),
-      alias: RefName.alias('t'),
-      columns: SqlColumnList.create(header.map((_, i) => RefName.create(`c${i}`, true))),
-    }),
-  ).changeSelectExpressions(
-    header.map(({ name, nativeType, sqlType }, i) => {
-      let ex: SqlExpression = C(`c${i}`);
-      if (nativeType === 'COMPLEX<json>') {
-        ex = F('PARSE_JSON', ex);
-      } else if (sqlType && sqlType.endsWith(' ARRAY')) {
-        ex = F('STRING_TO_ARRAY', ex, SAMPLE_ARRAY_SEPARATOR);
-        if (sqlType !== 'VARCHAR ARRAY') {
-          ex = ex.cast(sqlType);
-        }
-      } else if (sqlType) {
-        ex = ex.cast(sqlType);
-      }
-      return ex.as(name, true);
-    }),
-  );
-}
diff --git a/web-console/src/utils/table-helpers.ts b/web-console/src/utils/table-helpers.ts
index a04635c61c5..a2b97350fa4 100644
--- a/web-console/src/utils/table-helpers.ts
+++ b/web-console/src/utils/table-helpers.ts
@@ -55,11 +55,11 @@ export function getNumericColumnBraces(
   queryResult.header.forEach((column, i) => {
     if (!oneOf(column.nativeType, 'LONG', 'FLOAT', 'DOUBLE')) return;
     const formatter = columnHints?.get(column.name)?.formatter || formatNumber;
-    const brace = filterMap(rows, row =>
+    const braces = filterMap(rows, row =>
       oneOf(typeof row[i], 'number', 'bigint') ? formatter(row[i]) : undefined,
     );
-    if (rows.length === brace.length) {
-      numericColumnBraces[i] = brace;
+    if (braces.length) {
+      numericColumnBraces[i] = braces;
     }
   });
 }
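The table-helpers change above corresponds to the "fix braces with nulls" bullet in the commit message: previously a numeric column received brace values (the strings measured for right-aligned number rendering) only when every row held a number, so a single null turned alignment off for the whole column; now whatever numeric cells exist are kept. A self-contained sketch of the new behaviour, with `filterMap` re-implemented locally as a stand-in for the web-console helper:

    // Local stand-in for the web-console's filterMap utility.
    function filterMap<T, U>(xs: readonly T[], f: (x: T, i: number) => U | undefined): U[] {
      return xs.map(f).filter((x): x is U => x !== undefined);
    }

    const rows: (number | null)[] = [3, null, 1223334];

    const braces = filterMap(rows, x => (typeof x === 'number' ? String(x) : undefined));
    // braces === ['3', '1223334']
    // Old check: rows.length === braces.length fails because of the null, so no alignment.
    // New check: braces.length is non-zero, so the column still gets width alignment.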
diff --git a/web-console/src/utils/types.ts b/web-console/src/utils/types.ts
index 76437e58327..8ebbd6938b3 100644
--- a/web-console/src/utils/types.ts
+++ b/web-console/src/utils/types.ts
@@ -41,6 +41,9 @@ export function dataTypeToIcon(dataType: string): IconName {
   const typeUpper = dataType.toUpperCase();
 
   switch (typeUpper) {
+    case 'NULL':
+      return IconNames.CIRCLE;
+
     case 'TIMESTAMP':
       return IconNames.TIME;
 
@@ -75,12 +78,17 @@
       return IconNames.DIAGRAM_TREE;
 
     case 'COMPLEX':
+    case 'COMPLEX':
     case 'COMPLEX':
+    case 'COMPLEX':
     case 'COMPLEX':
       return IconNames.SNOWFLAKE;
 
     case 'COMPLEX':
     case 'COMPLEX':
+    case 'COMPLEX':
+    case 'COMPLEX':
+    case 'COMPLEX':
       return IconNames.HORIZONTAL_DISTRIBUTION;
 
     case 'COMPLEX':
@@ -93,8 +101,15 @@
     case 'COMPLEX':
       return IconNames.DOUBLE_CHEVRON_RIGHT;
 
-    case 'NULL':
-      return IconNames.CIRCLE;
+    case 'COMPLEX':
+      return IconNames.FILTER_LIST;
+
+    case 'COMPLEX':
+    case 'COMPLEX':
+      return IconNames.HURRICANE;
+
+    case 'COMPLEX':
+      return IconNames.SORT_NUMERICAL_DESC;
 
     default:
       if (typeUpper.startsWith('ARRAY')) return IconNames.ARRAY;
diff --git a/web-console/src/utils/sample-query.spec.tsx b/web-console/src/utils/values-query.spec.tsx
similarity index 57%
rename from web-console/src/utils/sample-query.spec.tsx
rename to web-console/src/utils/values-query.spec.tsx
index 2864f0d28fe..99884f382c1 100644
--- a/web-console/src/utils/sample-query.spec.tsx
+++ b/web-console/src/utils/values-query.spec.tsx
@@ -18,26 +18,32 @@
 
 import { QueryResult, sane } from '@druid-toolkit/query';
 
-import { sampleDataToQuery } from './sample-query';
+import { queryResultToValuesQuery } from './values-query';
 
-describe('sample-query', () => {
+describe('queryResultToValuesQuery', () => {
   it('works', () => {
     const result = QueryResult.fromRawResult(
       [
-        ['__time', 'host', 'service', 'msg'],
-        ['LONG', 'STRING', 'STRING', 'COMPLEX<json>'],
-        ['TIMESTAMP', 'VARCHAR', 'VARCHAR', 'OTHER'],
+        ['__time', 'host', 'service', 'msg', 'language', 'nums', 'nulls'],
+        ['LONG', 'STRING', 'STRING', 'COMPLEX<json>', 'ARRAY<STRING>', 'ARRAY<LONG>', 'STRING'],
+        ['TIMESTAMP', 'VARCHAR', 'VARCHAR', 'OTHER', 'ARRAY', 'ARRAY', 'VARCHAR'],
         [
           '2022-02-01T00:00:00.000Z',
           'brokerA.internal',
           'broker',
           '{"type":"sys","swap/free":1223334,"swap/max":3223334}',
+          ['es', 'es-419'],
+          [1],
+          null,
         ],
         [
           '2022-02-01T00:00:00.000Z',
           'brokerA.internal',
           'broker',
           '{"type":"query","time":1223,"bytes":2434234}',
+          ['en', 'es', 'es-419'],
+          [2, 3],
+          null,
         ],
       ],
      false,
 
      true,
    );
 
-    expect(sampleDataToQuery(result).toString()).toEqual(sane`
+    expect(queryResultToValuesQuery(result).toString()).toEqual(sane`
       SELECT
-        CAST("c0" AS TIMESTAMP) AS "__time",
-        CAST("c1" AS VARCHAR) AS "host",
-        CAST("c2" AS VARCHAR) AS "service",
-        PARSE_JSON("c3") AS "msg"
+        CAST("c1" AS TIMESTAMP) AS "__time",
+        CAST("c2" AS VARCHAR) AS "host",
+        CAST("c3" AS VARCHAR) AS "service",
+        PARSE_JSON("c4") AS "msg",
+        STRING_TO_ARRAY("c5", '<#>') AS "language",
+        CAST(STRING_TO_ARRAY("c6", '<#>') AS BIGINT ARRAY) AS "nums",
+        CAST(NULL AS VARCHAR) AS "nulls"
       FROM (
         VALUES
-        ('2022-02-01T00:00:00.000Z', 'brokerA.internal', 'broker', '"{\\"type\\":\\"sys\\",\\"swap/free\\":1223334,\\"swap/max\\":3223334}"'),
-        ('2022-02-01T00:00:00.000Z', 'brokerA.internal', 'broker', '"{\\"type\\":\\"query\\",\\"time\\":1223,\\"bytes\\":2434234}"')
-      ) AS "t" ("c0", "c1", "c2", "c3")
+        ('2022-02-01T00:00:00.000Z', 'brokerA.internal', 'broker', '{"type":"sys","swap/free":1223334,"swap/max":3223334}', 'es<#>es-419', '1', NULL),
+        ('2022-02-01T00:00:00.000Z', 'brokerA.internal', 'broker', '{"type":"query","time":1223,"bytes":2434234}', 'en<#>es<#>es-419', '2<#>3', NULL)
+      ) AS "t" ("c1", "c2", "c3", "c4", "c5", "c6", "c7")
     `);
   });
 });
diff --git a/web-console/src/utils/values-query.tsx b/web-console/src/utils/values-query.tsx
new file mode 100644
index 00000000000..2f1a5f699ca
--- /dev/null
+++ b/web-console/src/utils/values-query.tsx
@@ -0,0 +1,121 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import type { Column, QueryResult, SqlExpression } from '@druid-toolkit/query';
+import {
+  C,
+  F,
+  L,
+  RefName,
+  SqlAlias,
+  SqlColumnList,
+  SqlQuery,
+  SqlRecord,
+  SqlType,
+  SqlValues,
+} from '@druid-toolkit/query';
+import * as JSONBig from 'json-bigint-native';
+
+import { oneOf } from './general';
+
+const SAMPLE_ARRAY_SEPARATOR = '<#>'; // Note that this is used as a regexp, so don't add any special regexp characters
+
+/**
+ This function corrects for the legacy behaviour where Druid sometimes returns array columns as
+ { sqlType: 'ARRAY', nativeType: 'ARRAY<STRING>' }
+ instead of the more correct description of
+ { sqlType: 'VARCHAR ARRAY', nativeType: 'ARRAY<STRING>' }.
+ Use this function to get the effective SQL type (`VARCHAR ARRAY` in the example above).
+ */
+function getEffectiveSqlType(column: Column): string | undefined {
+  const sqlType = column.sqlType;
+  if (sqlType === 'ARRAY' && String(column.nativeType).startsWith('ARRAY<')) {
+    return `${SqlType.fromNativeType(String(column.nativeType).slice(6, -1))} ARRAY`;
+  }
+  return sqlType;
+}
+
+function columnIsAllNulls(rows: readonly unknown[][], columnIndex: number): boolean {
+  return rows.every(row => row[columnIndex] === null);
+}
+
+function isJsonString(x: unknown): boolean {
+  return typeof x === 'string' && oneOf(x[0], '"', '{', '[');
+}
+
+export function queryResultToValuesQuery(sample: QueryResult): SqlQuery {
+  const { header, rows } = sample;
+  return SqlQuery.create(
+    new SqlAlias({
+      expression: SqlValues.create(
+        rows.map(row =>
+          SqlRecord.create(
+            row.map((r, i) => {
+              const column = header[i];
+              const { nativeType } = column;
+              const sqlType = getEffectiveSqlType(column);
+              if (nativeType === 'COMPLEX<json>') {
+                return L(isJsonString(r) ? r : JSONBig.stringify(r));
+              } else if (String(sqlType).endsWith(' ARRAY')) {
+                return L(r.join(SAMPLE_ARRAY_SEPARATOR));
+              } else if (
+                sqlType === 'OTHER' &&
+                String(nativeType).startsWith('COMPLEX<') &&
+                typeof r === 'string' &&
+                r.startsWith('"') &&
+                r.endsWith('"')
+              ) {
+                // r is a JSON encoded base64 string
+                return L(r.slice(1, -1));
+              } else if (typeof r === 'object') {
+                // Cleanup array if it happens to get here, it shouldn't.
+                return L.NULL;
+              } else {
+                return L(r);
+              }
+            }),
+          ),
+        ),
+      ),
+      alias: RefName.alias('t'),
+      columns: SqlColumnList.create(header.map((_, i) => RefName.create(`c${i + 1}`, true))),
+    }),
+  ).changeSelectExpressions(
+    header.map((column, i) => {
+      const { name, nativeType } = column;
+      const sqlType = getEffectiveSqlType(column);
+
+      // The columnIsAllNulls check is needed due to https://github.com/apache/druid/issues/16456
+      // Remove it when the issue above is resolved
+      let ex: SqlExpression = columnIsAllNulls(rows, i) ? L.NULL : C(`c${i + 1}`);
+      if (nativeType === 'COMPLEX<json>') {
+        ex = F('PARSE_JSON', ex);
+      } else if (String(sqlType).endsWith(' ARRAY')) {
+        ex = F('STRING_TO_ARRAY', ex, SAMPLE_ARRAY_SEPARATOR);
+        if (sqlType && sqlType !== 'ARRAY' && sqlType !== 'VARCHAR ARRAY') {
+          ex = ex.cast(sqlType);
+        }
+      } else if (sqlType === 'OTHER' && String(nativeType).startsWith('COMPLEX<')) {
+        ex = F('DECODE_BASE64_COMPLEX', String(nativeType).slice(8, -1), ex);
+      } else if (sqlType && sqlType !== 'OTHER') {
+        ex = ex.cast(sqlType);
+      }
+      return ex.as(name, true);
+    }),
+  );
+}
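A usage sketch for the new module: `queryResultToValuesQuery` returns a `SqlQuery` AST, and callers stringify it on demand (this is what the 'sql' branch in download.ts does). The fixture below mirrors the spec above; note how array cells travel through the VALUES list as `'<#>'`-joined strings and are re-expanded with `STRING_TO_ARRAY` in the SELECT. The data values are illustrative:

    import { QueryResult } from '@druid-toolkit/query';

    import { queryResultToValuesQuery } from './values-query';

    // Name row, native type row, SQL type row, then data rows.
    const sample = QueryResult.fromRawResult(
      [
        ['tags', 'score'],
        ['ARRAY<STRING>', 'LONG'],
        ['ARRAY', 'BIGINT'],
        [['a', 'b'], 3],
        [['c'], null],
      ],
      false,
      true,
      true,
      true,
    );

    // Yields roughly:
    //   SELECT
    //     STRING_TO_ARRAY("c1", '<#>') AS "tags",
    //     CAST("c2" AS BIGINT) AS "score"
    //   FROM (VALUES ('a<#>b', 3), ('c', NULL)) AS "t" ("c1", "c2")
    console.log(queryResultToValuesQuery(sample).toString());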
diff --git a/web-console/src/views/sql-data-loader-view/schema-step/schema-step.tsx b/web-console/src/views/sql-data-loader-view/schema-step/schema-step.tsx
index 149843adc0a..5f0d24f69b1 100644
--- a/web-console/src/views/sql-data-loader-view/schema-step/schema-step.tsx
+++ b/web-console/src/views/sql-data-loader-view/schema-step/schema-step.tsx
@@ -79,7 +79,7 @@ import {
   filterMap,
   oneOf,
   queryDruidSql,
-  sampleDataToQuery,
+  queryResultToValuesQuery,
   tickIcon,
   timeFormatToSql,
   wait,
@@ -479,7 +479,7 @@
 
   const sampleDataQuery = useMemo(() => {
     if (!sampleState.data) return;
-    return sampleDataToQuery(sampleState.data);
+    return queryResultToValuesQuery(sampleState.data);
   }, [sampleState.data]);
 
   const previewQueryString = useLastDefined(
diff --git a/web-console/src/views/workbench-view/execution-summary-panel/execution-summary-panel.tsx b/web-console/src/views/workbench-view/execution-summary-panel/execution-summary-panel.tsx
index 75999b5ee15..e64e05a703b 100644
--- a/web-console/src/views/workbench-view/execution-summary-panel/execution-summary-panel.tsx
+++ b/web-console/src/views/workbench-view/execution-summary-panel/execution-summary-panel.tsx
@@ -16,13 +16,14 @@
  * limitations under the License.
  */
 
-import { Button, ButtonGroup, Menu, MenuDivider, MenuItem, Position } from '@blueprintjs/core';
+import { Button, ButtonGroup, Menu, MenuItem, Position } from '@blueprintjs/core';
 import { IconNames } from '@blueprintjs/icons';
 import { Popover2 } from '@blueprintjs/popover2';
 import type { JSX } from 'react';
 import React, { useState } from 'react';
 
 import type { Execution } from '../../../druid-models';
+import type { Format } from '../../../utils';
 import {
   copyQueryResultsToClipboard,
   downloadQueryResults,
 
   const warningCount = execution?.stages?.getWarningCount();
 
-  const handleDownload = (format: string) => {
+  const handleDownload = (format: Format) => {
     downloadQueryResults(queryResult, `results-${execution.id}.${format}`, format);
   };
 
-  const handleCopy = (format: string) => {
+  const handleCopy = (format: Format) => {
     copyQueryResultsToClipboard(queryResult, format);
   };
 
           className="download-button"
           content={
-
-              handleDownload('csv')} />
-              handleDownload('tsv')} />
-              handleDownload('json')} />
-
-              handleCopy('csv')} />
-              handleCopy('tsv')} />
-              handleCopy('json')} />
+
+              handleDownload('csv')} />
+              handleDownload('tsv')} />
+              handleDownload('json')} />
+              handleDownload('sql')} />
+
+
+              handleCopy('csv')} />
+              handleCopy('tsv')} />
+              handleCopy('json')} />
+              handleCopy('sql')} />
+
+
           }
           position={Position.BOTTOM_RIGHT}
diff --git a/web-console/src/views/workbench-view/result-table-pane/result-table-pane.tsx b/web-console/src/views/workbench-view/result-table-pane/result-table-pane.tsx
index 294aeeeb798..9b074937c5a 100644
--- a/web-console/src/views/workbench-view/result-table-pane/result-table-pane.tsx
+++ b/web-console/src/views/workbench-view/result-table-pane/result-table-pane.tsx
@@ -620,7 +620,7 @@ export const ResultTablePane = React.memo(function ResultTablePane(props: Result
                     {numericColumnBraces[i] ? (
From 44ea4e1c51b60fdc31bd47926e9dacfa79d7cb44 Mon Sep 17 00:00:00 2001
From: Zoltan Haindrich
Date: Wed, 22 May 2024 20:42:11 +0200
Subject: [PATCH 3/3] Fix cds-coordinator-metadata-query-disabled (#16488)

Fixes the issue with the newly enabled `cds-coordinator-metadata-query-disabled`
[split](https://github.com/apache/druid/pull/16468):

* configures the group to use the `prepopulated-data` environment configs, which set up `S3` access
* this is needed because these tests use a [dataset which is loaded from s3](https://github.com/apache/druid/blob/master/integration-tests/docker/test-data/cds-coordinator-metadata-query-disabled-sample-data.sql)
* also undoes the previous [fix](https://github.com/apache/druid/pull/16469) of setting the aws region explicitly; configuring `prepopulated-data` is a more complete solution since it also sets the region, so the explicit setting is no longer needed
---
 .github/workflows/standard-its.yml                             | 1 +
 .../docker/docker-compose.cds-task-schema-publish-disabled.yml | 1 -
 .../docker/environment-configs/test-groups/prepopulated-data   | 2 +-
 .../druid/metadata/IndexerSQLMetadataStorageCoordinator.java   | 1 -
 4 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/standard-its.yml b/.github/workflows/standard-its.yml
index c00940b0a39..0083e952ea3 100644
--- a/.github/workflows/standard-its.yml
+++ b/.github/workflows/standard-its.yml
@@ -54,6 +54,7 @@ jobs:
       build_jdk: 8
       runtime_jdk: 8
       testing_groups: -Dgroups=${{ matrix.testing_group }}
+      override_config_path: ./environment-configs/test-groups/prepopulated-data
       use_indexer: middleManager
       group: ${{ matrix.testing_group }}

diff --git a/integration-tests/docker/docker-compose.cds-task-schema-publish-disabled.yml b/integration-tests/docker/docker-compose.cds-task-schema-publish-disabled.yml
index 9e9bda73abf..190f4eaf8bc 100644
--- a/integration-tests/docker/docker-compose.cds-task-schema-publish-disabled.yml
+++ b/integration-tests/docker/docker-compose.cds-task-schema-publish-disabled.yml
@@ -63,7 +63,6 @@ services:
       service: druid-historical
     environment:
       - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
-      - AWS_REGION=us-west-2
     depends_on:
       - druid-zookeeper-kafka

diff --git a/integration-tests/docker/environment-configs/test-groups/prepopulated-data b/integration-tests/docker/environment-configs/test-groups/prepopulated-data
index deb59dfe402..fd1c7412824 100644
--- a/integration-tests/docker/environment-configs/test-groups/prepopulated-data
+++ b/integration-tests/docker/environment-configs/test-groups/prepopulated-data
@@ -20,7 +20,7 @@ AWS_REGION=us-east-1
 
 # If you are making a change in load list below, make the necessary changes in github actions too
-druid_extensions_loadList=["mysql-metadata-storage","druid-s3-extensions","druid-basic-security","simple-client-sslcontext","druid-testing-tools","druid-lookups-cached-global","druid-histogram","druid-datasketches","druid-integration-tests"]
+druid_extensions_loadList=["mysql-metadata-storage","druid-s3-extensions","druid-basic-security","simple-client-sslcontext","druid-testing-tools","druid-lookups-cached-global","druid-histogram","druid-datasketches","druid-integration-tests","druid-parquet-extensions","druid-avro-extensions","druid-protobuf-extensions","druid-orc-extensions","druid-kafka-indexing-service"]
 
 # Setting s3 credentials and region to use pre-populated data for testing.
 druid_s3_accessKey=AKIAT2GGLKKJQCMG64V4
diff --git a/server/src/main/java/org/apache/druid/metadata/IndexerSQLMetadataStorageCoordinator.java b/server/src/main/java/org/apache/druid/metadata/IndexerSQLMetadataStorageCoordinator.java
index f64c2a0f75f..2b02f09926b 100644
--- a/server/src/main/java/org/apache/druid/metadata/IndexerSQLMetadataStorageCoordinator.java
+++ b/server/src/main/java/org/apache/druid/metadata/IndexerSQLMetadataStorageCoordinator.java
@@ -129,7 +129,6 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor
       CentralizedDatasourceSchemaConfig centralizedDatasourceSchemaConfig
   )
   {
-
     this.jsonMapper = jsonMapper;
     this.dbTables = dbTables;
     this.connector = connector;