Web console: correctly cancel JSON-shaped SQL queries (#12134)

* misc fixes

* fix type typo
Vadim Ogievetsky 2022-01-10 14:24:05 -08:00 committed by GitHub
parent 7c17341caa
commit 2a41b7bffa
14 changed files with 118 additions and 86 deletions

View File

@ -5107,7 +5107,7 @@ license_category: binary
module: web-console
license_name: Apache License version 2.0
copyright: Palantir Technologies
version: 3.26.1
version: 3.31.0
---
@ -5574,7 +5574,7 @@ license_category: binary
module: web-console
license_name: Apache License version 2.0
copyright: Imply Data
version: 0.14.4
version: 0.14.6
---
@ -5992,7 +5992,7 @@ license_category: binary
module: web-console
license_name: MIT License
copyright: Javier Blanco
version: 1.0.6
version: 1.0.7
license_file_path: licenses/bin/path-parse.MIT
---

View File

@ -3057,9 +3057,9 @@
}
},
"@blueprintjs/icons": {
"version": "3.26.1",
"resolved": "https://registry.npmjs.org/@blueprintjs/icons/-/icons-3.26.1.tgz",
"integrity": "sha512-qxAnqhzt5WqYuWN+7p3Pk7bOrhj56YXzD8n79Met8+VIQ14IpKnxr7YAYmNB5X/dVweqXpEk6tKyUjnDjBhulw==",
"version": "3.31.0",
"resolved": "https://registry.npmjs.org/@blueprintjs/icons/-/icons-3.31.0.tgz",
"integrity": "sha512-6pXhHC8zEvoDKN5KNsIHNuCRKsemmRbXNv1jweB95VaFzR1M+Mik+Qi+13Wd+VtZrzes2ZcWttIeyuK91NoLCw==",
"requires": {
"classnames": "^2.2",
"tslib": "~1.13.0"
@ -8091,9 +8091,9 @@
}
},
"druid-query-toolkit": {
"version": "0.14.4",
"resolved": "https://registry.npmjs.org/druid-query-toolkit/-/druid-query-toolkit-0.14.4.tgz",
"integrity": "sha512-PmD5vwoHQxNxZ8E8vRdHvh5OjuvA+yHD5dhiKDzIzPtnFiwRHLJKyOLSQ6rmN1VAKbOdU4JCZIzPFUB8bEMBAQ==",
"version": "0.14.6",
"resolved": "https://registry.npmjs.org/druid-query-toolkit/-/druid-query-toolkit-0.14.6.tgz",
"integrity": "sha512-Dv/oXD80+2SEV8J8m8Ib6giIU5fWcHK0hr/l04NbZMCpZhX/9NLDWW9HEQltRp9EyD3UEHbkoMChcbyRPAgc8w==",
"requires": {
"tslib": "^2.2.0"
}

View File

@ -68,7 +68,7 @@
"dependencies": {
"@blueprintjs/core": "^3.45.0",
"@blueprintjs/datetime": "^3.23.4",
"@blueprintjs/icons": "^3.26.1",
"@blueprintjs/icons": "^3.31.0",
"@blueprintjs/popover2": "^0.10.1",
"ace-builds": "^1.4.13",
"axios": "^0.21.4",
@ -79,7 +79,7 @@
"d3-axis": "^1.0.12",
"d3-scale": "^3.2.0",
"d3-selection": "^1.4.0",
"druid-query-toolkit": "^0.14.4",
"druid-query-toolkit": "^0.14.6",
"file-saver": "^2.0.2",
"fontsource-open-sans": "^3.0.9",
"has-own-prop": "^2.0.0",

View File

@ -148,6 +148,7 @@ export const JsonInput = React.memo(function JsonInput(props: JsonInputProps) {
enableLiveAutocompletion: false,
showLineNumbers: false,
tabSize: 2,
newLineMode: 'unix' as any, // newLineMode is incorrectly assumed to be boolean in the typings
}}
style={{}}
onLoad={editor => {

View File

@ -66,6 +66,7 @@ export const SpecDialog = React.memo(function SpecDialog(props: SpecDialogProps)
setOptions={{
showLineNumbers: true,
tabSize: 2,
newLineMode: 'unix' as any, // newLineMode is incorrectly assumed to be boolean in the typings
}}
style={{}}
placeholder="{ JSON spec... }"

View File

@ -29,16 +29,16 @@ describe('StatusDialog', () => {
});
it('filters data that contains input', () => {
const data = [
const row = [
'org.apache.druid.common.gcp.GcpModule',
'org.apache.druid.common.aws.AWSModule',
'io.imply.druid.UtilityBeltModule',
'org.apache.druid.OtherModule',
];
expect(anywhereMatcher({ id: '0', value: 'common' }, data)).toEqual(true);
expect(anywhereMatcher({ id: '1', value: 'common' }, data)).toEqual(true);
expect(anywhereMatcher({ id: '0', value: 'org' }, data)).toEqual(true);
expect(anywhereMatcher({ id: '1', value: 'org' }, data)).toEqual(true);
expect(anywhereMatcher({ id: '2', value: 'common' }, data)).toEqual(false);
expect(anywhereMatcher({ id: '0', value: 'common' }, row)).toEqual(true);
expect(anywhereMatcher({ id: '1', value: 'common' }, row)).toEqual(true);
expect(anywhereMatcher({ id: '0', value: 'org' }, row)).toEqual(true);
expect(anywhereMatcher({ id: '1', value: 'org' }, row)).toEqual(true);
expect(anywhereMatcher({ id: '2', value: 'common' }, row)).toEqual(false);
});
});
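
For context, the assertions above pin the matcher down: the filter's id names a position in the row and value is matched as a substring of that cell. A minimal re-implementation consistent with the test (hypothetical, not the console's actual anywhereMatcher source):

// Hypothetical sketch inferred from the assertions above; the real helper may differ.
function anywhereMatcher(filter: { id: string; value: string }, row: string[]): boolean {
  const cell = row[Number(filter.id)];
  return typeof cell === 'string' && cell.includes(filter.value);
}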

View File

@ -28,7 +28,7 @@ import {
} from './ingestion-spec';
describe('ingestion-spec', () => {
it('upgrades / downgrades task spec', () => {
it('upgrades / downgrades task spec 1', () => {
const oldTaskSpec = {
type: 'index_parallel',
spec: {
@ -36,7 +36,7 @@ describe('ingestion-spec', () => {
type: 'index_parallel',
firehose: {
type: 'http',
uris: ['https://static.imply.io/data/wikipedia.json.gz'],
uris: ['https://website.com/wikipedia.json.gz'],
},
},
tuningConfig: {
@ -157,7 +157,46 @@ describe('ingestion-spec', () => {
},
inputSource: {
type: 'http',
uris: ['https://static.imply.io/data/wikipedia.json.gz'],
uris: ['https://website.com/wikipedia.json.gz'],
},
type: 'index_parallel',
},
tuningConfig: {
type: 'index_parallel',
},
},
type: 'index_parallel',
});
});
it('upgrades / downgrades task spec (without parser)', () => {
const oldTaskSpec = {
type: 'index_parallel',
ioConfig: {
type: 'index_parallel',
firehose: { type: 'http', uris: ['https://website.com/wikipedia.json.gz'] },
},
tuningConfig: { type: 'index_parallel' },
dataSchema: {
dataSource: 'new-data-source',
granularitySpec: { type: 'uniform', segmentGranularity: 'DAY', queryGranularity: 'HOUR' },
},
};
expect(upgradeSpec(oldTaskSpec)).toEqual({
spec: {
dataSchema: {
dataSource: 'new-data-source',
granularitySpec: {
queryGranularity: 'HOUR',
segmentGranularity: 'DAY',
type: 'uniform',
},
},
ioConfig: {
inputSource: {
type: 'http',
uris: ['https://website.com/wikipedia.json.gz'],
},
type: 'index_parallel',
},
@ -364,7 +403,7 @@ describe('spec utils', () => {
type: 'index_parallel',
inputSource: {
type: 'http',
uris: ['https://static.imply.io/data/wikipedia.json.gz'],
uris: ['https://website.com/wikipedia.json.gz'],
},
inputFormat: {
type: 'json',
@ -439,7 +478,7 @@ describe('spec utils', () => {
"inputSource": Object {
"type": "http",
"uris": Array [
"https://static.imply.io/data/wikipedia.json.gz",
"https://website.com/wikipedia.json.gz",
],
},
"type": "index_parallel",
@ -490,7 +529,7 @@ describe('spec utils', () => {
"inputSource": Object {
"type": "http",
"uris": Array [
"https://static.imply.io/data/wikipedia.json.gz",
"https://website.com/wikipedia.json.gz",
],
},
"type": "index_parallel",

View File

@ -1325,7 +1325,7 @@ export interface TuningConfig {
}
export interface PartitionsSpec {
type: 'string';
type: string;
// For type: dynamic
maxTotalRows?: number;
@ -2261,8 +2261,7 @@ export function upgradeSpec(spec: any): Partial<IngestionSpec> {
};
}
if (!deepGet(spec, 'spec.dataSchema.parser')) return spec;
// Upgrade firehose if exists
if (deepGet(spec, 'spec.ioConfig.firehose')) {
switch (deepGet(spec, 'spec.ioConfig.firehose.type')) {
case 'static-s3':
@ -2278,6 +2277,8 @@ export function upgradeSpec(spec: any): Partial<IngestionSpec> {
spec = deepMove(spec, 'spec.ioConfig.firehose', 'spec.ioConfig.inputSource');
}
// Decompose parser if exists
if (deepGet(spec, 'spec.dataSchema.parser')) {
spec = deepMove(
spec,
'spec.dataSchema.parser.parseSpec.timestampSpec',
@ -2291,6 +2292,7 @@ export function upgradeSpec(spec: any): Partial<IngestionSpec> {
spec = deepMove(spec, 'spec.dataSchema.parser.parseSpec', 'spec.ioConfig.inputFormat');
spec = deepDelete(spec, 'spec.dataSchema.parser');
spec = deepMove(spec, 'spec.ioConfig.inputFormat.format', 'spec.ioConfig.inputFormat.type');
}
return spec;
}
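
The key behavioral change here is that the early return on a missing spec.dataSchema.parser is gone, so the firehose-to-inputSource upgrade and the parser decomposition now run independently; that is what the new "without parser" test above exercises. A toy sketch of the resulting control flow (not the real upgradeSpec, which also renames firehose types and moves timestampSpec, dimensionsSpec, and inputFormat):

// Toy model of the new control flow: two independent steps instead of an early return.
interface ToySpec {
  ioConfig?: { firehose?: Record<string, any>; inputSource?: Record<string, any> };
  dataSchema?: { parser?: Record<string, any> };
}

function upgradeSketch(spec: ToySpec): ToySpec {
  if (spec.ioConfig?.firehose) {
    // Upgrade firehose -> inputSource even when there is no parser.
    spec.ioConfig.inputSource = spec.ioConfig.firehose;
    delete spec.ioConfig.firehose;
  }
  if (spec.dataSchema?.parser) {
    // Decompose the parser only when it is present (details elided).
    delete spec.dataSchema.parser;
  }
  return spec;
}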

View File

@ -57,8 +57,17 @@ export function parseHtmlError(htmlStr: string): string | undefined {
.replace(/&gt;/g, '>');
}
function getDruidErrorObject(e: any): DruidErrorResponse | string {
if (e.response) {
// This is a direct axios response error
return e.response.data || {};
} else {
return e; // Assume the error was passed in directly
}
}
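
For illustration, both of the following inputs yield the same payload from the helper above, so getDruidErrorMessage and DruidError below can treat them uniformly (field names mirror Druid's standard error response; the values are made up):

// An axios error wrapping the Druid error payload in response.data ...
const fromAxios = getDruidErrorObject({
  response: { data: { error: 'SQL parse failed', errorMessage: 'Encountered "FORM"' } },
});
// ... and the same payload passed in directly, already unwrapped by the caller.
const direct = getDruidErrorObject({ error: 'SQL parse failed', errorMessage: 'Encountered "FORM"' });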
export function getDruidErrorMessage(e: any): string {
const data: DruidErrorResponse | string = (e.response || {}).data || {};
const data = getDruidErrorObject(e);
switch (typeof data) {
case 'object':
return (
@ -241,7 +250,7 @@ export class DruidError extends Error {
if (axios.isCancel(e)) {
this.canceled = true;
} else {
const data: DruidErrorResponse | string = (e.response || {}).data || {};
const data = getDruidErrorObject(e);
let druidErrorResponse: DruidErrorResponse;
switch (typeof data) {

View File

@ -40,7 +40,7 @@ import {
import { Api } from '../singletons';
import { getDruidErrorMessage, queryDruidRune } from './druid-query';
import { arrangeWithPrefixSuffix, EMPTY_ARRAY, filterMap, oneOf } from './general';
import { arrangeWithPrefixSuffix, EMPTY_ARRAY, filterMap } from './general';
import { deepGet, deepSet } from './object-change';
const SAMPLER_URL = `/druid/indexer/v1/sampler`;
@ -65,7 +65,6 @@ export interface SampleResponse {
export type CacheRows = Record<string, any>[];
export interface SampleResponseWithExtraInfo extends SampleResponse {
queryGranularity?: any;
rollup?: boolean;
columns?: Record<string, any>;
aggregators?: Record<string, any>;
@ -236,26 +235,6 @@ function fixSamplerTypes(sampleSpec: SampleSpec): SampleSpec {
return sampleSpec;
}
function cleanupQueryGranularity(queryGranularity: any): any {
let queryGranularityType = deepGet(queryGranularity, 'type');
if (typeof queryGranularityType !== 'string') return queryGranularity;
queryGranularityType = queryGranularityType.toUpperCase();
const knownGranularity = oneOf(
queryGranularityType,
'NONE',
'SECOND',
'MINUTE',
'HOUR',
'DAY',
'WEEK',
'MONTH',
'YEAR',
);
return knownGranularity ? queryGranularityType : queryGranularity;
}
export async function sampleForConnect(
spec: Partial<IngestionSpec>,
sampleStrategy: SampleStrategy,
@ -305,14 +284,11 @@ export async function sampleForConnect(
intervals: [deepGet(ioConfig, 'inputSource.interval')],
merge: true,
lenientAggregatorMerge: true,
analysisTypes: ['timestampSpec', 'queryGranularity', 'aggregators', 'rollup'],
analysisTypes: ['aggregators', 'rollup'],
});
if (Array.isArray(segmentMetadataResponse) && segmentMetadataResponse.length === 1) {
const segmentMetadataResponse0 = segmentMetadataResponse[0];
samplerResponse.queryGranularity = cleanupQueryGranularity(
segmentMetadataResponse0.queryGranularity,
);
samplerResponse.rollup = segmentMetadataResponse0.rollup;
samplerResponse.columns = segmentMetadataResponse0.columns;
samplerResponse.aggregators = segmentMetadataResponse0.aggregators;

View File

@ -28,7 +28,7 @@ describe('utils', () => {
type: 'index_parallel',
inputSource: {
type: 'http',
uris: ['https://static.imply.io/data/wikipedia.json.gz'],
uris: ['https://website.com/wikipedia.json.gz'],
},
inputFormat: {
type: 'json',

View File

@ -1277,13 +1277,11 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
);
}
if (inputData.queryGranularity) {
newSpec = deepSet(
newSpec,
'spec.dataSchema.granularitySpec.queryGranularity',
inputData.queryGranularity,
'none',
);
}
if (inputData.columns) {
const aggregators = inputData.aggregators || {};

View File

@ -262,6 +262,7 @@ export class QueryInput extends React.PureComponent<QueryInputProps, QueryInputS
enableLiveAutocompletion: !runeMode,
showLineNumbers: true,
tabSize: 2,
newLineMode: 'unix' as any, // newLineMode is incorrectly assumed to be boolean in the typings
}}
style={{}}
placeholder="SELECT * FROM ..."

View File

@ -114,6 +114,11 @@ export class QueryView extends React.PureComponent<QueryViewProps, QueryViewStat
return queryString.trim().startsWith('{');
}
static isSql(query: any): boolean {
if (typeof query === 'string') return true;
return typeof query.query === 'string';
}
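
Together with isJsonLike above, this separates the three query shapes the console accepts; illustrative calls (query strings made up):

QueryView.isSql('SELECT * FROM wiki');                      // plain SQL text -> true
QueryView.isSql({ query: 'SELECT * FROM wiki' });           // JSON-shaped SQL (after Hjson.parse) -> true
QueryView.isSql({ queryType: 'scan', dataSource: 'wiki' }); // native query, no string `query` -> false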
static validRune(queryString: string): boolean {
try {
Hjson.parse(queryString);
@ -204,20 +209,20 @@ export class QueryView extends React.PureComponent<QueryViewProps, QueryViewStat
cancelToken,
): Promise<QueryResult> => {
const { queryString, queryContext, wrapQueryLimit } = queryWithContext;
const isSql = !QueryView.isJsonLike(queryString);
const query = isSql ? queryString : Hjson.parse(queryString);
const context = { ...queryContext, ...(mandatoryQueryContext || {}) };
const query = QueryView.isJsonLike(queryString) ? Hjson.parse(queryString) : queryString;
const isSql = QueryView.isSql(query);
const extraQueryContext = { ...queryContext, ...(mandatoryQueryContext || {}) };
if (typeof wrapQueryLimit !== 'undefined') {
context.sqlOuterLimit = wrapQueryLimit + 1;
if (isSql && typeof wrapQueryLimit !== 'undefined') {
extraQueryContext.sqlOuterLimit = wrapQueryLimit + 1;
}
const queryIdKey = isSql ? 'sqlQueryId' : 'queryId';
// Look for the queryId in the JSON itself (if native) or in the context object.
let cancelQueryId = (isSql ? undefined : query.context?.queryId) || context[queryIdKey];
// Look for an existing queryId in the JSON itself or in the extra context object.
let cancelQueryId = query.context?.[queryIdKey] || extraQueryContext[queryIdKey];
if (!cancelQueryId) {
// If the queryId (sqlQueryId) is not explicitly set on the context generate one so it is possible to cancel the query.
cancelQueryId = context[queryIdKey] = uuidv4();
// If the queryId (sqlQueryId) is not explicitly set on the context, generate one, thus making it possible to cancel the query.
cancelQueryId = extraQueryContext[queryIdKey] = uuidv4();
}
void cancelToken.promise
@ -231,7 +236,7 @@ export class QueryView extends React.PureComponent<QueryViewProps, QueryViewStat
try {
return await queryRunner.runQuery({
query,
extraQueryContext: context,
extraQueryContext,
cancelToken,
});
} catch (e) {
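
Putting it together: a JSON-shaped SQL query is now parsed first and classified by isSql, so its cancel id is read from (or written to) sqlQueryId rather than queryId, and sqlOuterLimit is only applied to SQL. A worked example under the new logic (query text and id are made up; the cancel handler itself is elided from the hunk above and presumably issues the delete against the matching endpoint using this id):

// A JSON-shaped SQL query with an explicit sqlQueryId in its context.
const queryString = '{"query": "SELECT * FROM wiki LIMIT 10", "context": {"sqlQueryId": "my-id"}}';

const query = QueryView.isJsonLike(queryString) ? Hjson.parse(queryString) : queryString;
QueryView.isSql(query); // true, so queryIdKey is 'sqlQueryId'
// cancelQueryId resolves to 'my-id' from query.context; before this change the query
// was treated as native (because it is JSON-like), a fresh queryId was generated,
// so cancellation could miss the id the broker was actually running the SQL under.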