Web console: catchup to all the backend changes (#14540)

This PR catches the console up to all the backend changes for Druid 27

Specifically:

Add page information to SqlStatementResource API #14512
Allow empty tiered replicants map for load rules #14432
Adding Interactive APIs for MSQ engine #14416
Add replication factor column to sys table #14403
Account for data format and compression in MSQ auto taskAssignment #14307
Errors take 3 #14004
This commit is contained in:
Vadim Ogievetsky 2023-07-16 22:56:46 -07:00 committed by GitHub
parent 214f7c3f65
commit d5f6749aa3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
141 changed files with 3015 additions and 1419 deletions

View File

@ -5358,6 +5358,15 @@ version: 4.9.22
---
name: "@druid-toolkit/query"
license_category: binary
module: web-console
license_name: Apache License version 2.0
copyright: Imply Data
version: 0.20.5
---
name: "@emotion/cache"
license_category: binary
module: web-console
@ -5926,15 +5935,6 @@ license_file_path: licenses/bin/dot-case.MIT
---
name: "druid-query-toolkit"
license_category: binary
module: web-console
license_name: Apache License version 2.0
copyright: Imply Data
version: 0.18.12
---
name: "emotion"
license_category: binary
module: web-console

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import { T } from 'druid-query-toolkit';
import { T } from '@druid-toolkit/query';
import type * as playwright from 'playwright-chromium';
import { DatasourcesOverview } from './component/datasources/overview';

View File

@ -14,6 +14,7 @@
"@blueprintjs/datetime2": "^0.9.35",
"@blueprintjs/icons": "^4.16.0",
"@blueprintjs/popover2": "^1.14.9",
"@druid-toolkit/query": "^0.20.5",
"ace-builds": "~1.4.14",
"axios": "^0.26.1",
"classnames": "^2.2.6",
@ -23,7 +24,6 @@
"d3-axis": "^2.1.0",
"d3-scale": "^3.3.0",
"d3-selection": "^2.0.0",
"druid-query-toolkit": "^0.18.12",
"file-saver": "^2.0.2",
"follow-redirects": "^1.14.7",
"fontsource-open-sans": "^3.0.9",
@ -2578,6 +2578,14 @@
"node": ">=10.0.0"
}
},
"node_modules/@druid-toolkit/query": {
"version": "0.20.5",
"resolved": "https://registry.npmjs.org/@druid-toolkit/query/-/query-0.20.5.tgz",
"integrity": "sha512-EY0131z611tklnui+vyRqsoPjTBbonkF7WwsNvT0KsBQYm5qtuvX/QlXGfX66f4KQzoo5G/4dRIVmZ9JbSRgzw==",
"dependencies": {
"tslib": "^2.5.2"
}
},
"node_modules/@emotion/cache": {
"version": "10.0.29",
"resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-10.0.29.tgz",
@ -8115,17 +8123,6 @@
"tslib": "^2.0.3"
}
},
"node_modules/druid-query-toolkit": {
"version": "0.18.12",
"resolved": "https://registry.npmjs.org/druid-query-toolkit/-/druid-query-toolkit-0.18.12.tgz",
"integrity": "sha512-wDcZUW8vhiJXARC44EFFwUeZW6lawXWv++bxHIUKaxq3M5byBuWPKjEDTCdPEHprxmR2sxaTpsPw4A6KiRmBog==",
"dependencies": {
"tslib": "^2.3.1"
},
"engines": {
"node": ">=14"
}
},
"node_modules/duplexer": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz",
@ -26628,6 +26625,14 @@
"integrity": "sha512-HyYEUDeIj5rRQU2Hk5HTB2uHsbRQpF70nvMhVzi+VJR0X+xNEhjPui4/kBf3VeH/wqD28PT4sVOm8qqLjBrSZg==",
"dev": true
},
"@druid-toolkit/query": {
"version": "0.20.5",
"resolved": "https://registry.npmjs.org/@druid-toolkit/query/-/query-0.20.5.tgz",
"integrity": "sha512-EY0131z611tklnui+vyRqsoPjTBbonkF7WwsNvT0KsBQYm5qtuvX/QlXGfX66f4KQzoo5G/4dRIVmZ9JbSRgzw==",
"requires": {
"tslib": "^2.5.2"
}
},
"@emotion/cache": {
"version": "10.0.29",
"resolved": "https://registry.npmjs.org/@emotion/cache/-/cache-10.0.29.tgz",
@ -31027,14 +31032,6 @@
"tslib": "^2.0.3"
}
},
"druid-query-toolkit": {
"version": "0.18.12",
"resolved": "https://registry.npmjs.org/druid-query-toolkit/-/druid-query-toolkit-0.18.12.tgz",
"integrity": "sha512-wDcZUW8vhiJXARC44EFFwUeZW6lawXWv++bxHIUKaxq3M5byBuWPKjEDTCdPEHprxmR2sxaTpsPw4A6KiRmBog==",
"requires": {
"tslib": "^2.3.1"
}
},
"duplexer": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz",

View File

@ -68,6 +68,7 @@
"@blueprintjs/datetime2": "^0.9.35",
"@blueprintjs/icons": "^4.16.0",
"@blueprintjs/popover2": "^1.14.9",
"@druid-toolkit/query": "^0.20.5",
"ace-builds": "~1.4.14",
"axios": "^0.26.1",
"classnames": "^2.2.6",
@ -77,7 +78,6 @@
"d3-axis": "^2.1.0",
"d3-scale": "^3.3.0",
"d3-selection": "^2.0.0",
"druid-query-toolkit": "^0.18.12",
"file-saver": "^2.0.2",
"follow-redirects": "^1.14.7",
"fontsource-open-sans": "^3.0.9",

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import { QueryResult } from 'druid-query-toolkit';
import { QueryResult } from '@druid-toolkit/query';
import * as JSONBig from 'json-bigint-native';
export function bootstrapJsonParse() {

View File

@ -7,7 +7,7 @@ exports[`AutoForm matches snapshot 1`] = `
<Memo(FormGroupWithInfo)
label="Test number"
>
<Memo(NumericInputWithDefault)
<Memo(FancyNumericInput)
disabled={false}
fill={true}
min={0}

View File

@ -136,5 +136,26 @@ describe('AutoForm', () => {
COMPACTION_CONFIG_FIELDS,
),
).toEqual('field tuningConfig.totalNumMergeTasks is defined but it should not be');
expect(
AutoForm.issueWithModel(
{
dataSource: 'ds',
taskPriority: 25,
skipOffsetFromLatest: 'P4D',
tuningConfig: {
partitionsSpec: {
type: 'not_a_know_partition_spec',
maxRowsPerSegment: 5000000,
},
totalNumMergeTasks: 5,
type: 'index_parallel',
forceGuaranteedRollup: false,
},
taskContext: null,
},
COMPACTION_CONFIG_FIELDS,
),
).toBeUndefined();
});
});

View File

@ -25,14 +25,15 @@ import {
NumericInput,
} from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import type { JSX } from 'react';
import React from 'react';
import { deepDelete, deepGet, deepSet, durationSanitizer } from '../../utils';
import { ArrayInput } from '../array-input/array-input';
import { FancyNumericInput } from '../fancy-numeric-input/fancy-numeric-input';
import { FormGroupWithInfo } from '../form-group-with-info/form-group-with-info';
import { IntervalInput } from '../interval-input/interval-input';
import { JsonInput } from '../json-input/json-input';
import { NumericInputWithDefault } from '../numeric-input-with-default/numeric-input-with-default';
import { PopoverText } from '../popover-text/popover-text';
import { SuggestibleInput } from '../suggestible-input/suggestible-input';
import type { Suggestion } from '../suggestion-menu/suggestion-menu';
@ -47,6 +48,7 @@ export interface Field<M> {
info?: React.ReactNode;
type:
| 'number'
| 'ratio'
| 'size-bytes'
| 'string'
| 'duration'
@ -64,7 +66,7 @@ export interface Field<M> {
zeroMeansUndefined?: boolean;
height?: string;
disabled?: Functor<M, boolean>;
defined?: Functor<M, boolean>;
defined?: Functor<M, boolean | undefined>;
required?: Functor<M, boolean>;
multiline?: Functor<M, boolean>;
hide?: Functor<M, boolean>;
@ -81,6 +83,11 @@ export interface Field<M> {
}) => JSX.Element;
}
/**
 * Coerces an arbitrary value to a number, or undefined when it does not
 * represent one. Guards against the JS coercion quirks `Number(null) === 0`
 * and `Number('') === 0`, which would otherwise make an absent model value
 * render as 0 in the input instead of falling back to the default.
 */
function toNumberOrUndefined(n: unknown): number | undefined {
  if (n == null || n === '') return undefined;
  const r = Number(n);
  return isNaN(r) ? undefined : r;
}
interface ComputedFieldValues {
required: boolean;
defaultValue?: any;
@ -155,10 +162,13 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
// Precompute which fields are defined because fields could be defined twice and only one should do the checking
const definedFields: Record<string, Field<M>> = {};
const notDefinedFields: Record<string, Field<M>> = {};
for (const field of fields) {
const fieldDefined = AutoForm.evaluateFunctor(field.defined, model, true);
if (fieldDefined) {
definedFields[field.name] = field;
} else if (fieldDefined === false) {
notDefinedFields[field.name] = field;
}
}
@ -180,7 +190,7 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
if (valueIssue) return `field ${field.name} has issue ${valueIssue}`;
}
}
} else {
} else if (notDefinedFields[field.name]) {
// The field is undefined
if (fieldValueDefined) {
return `field ${field.name} is defined but it should not be`;
@ -249,15 +259,14 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
const { required, defaultValue, modelValue } = AutoForm.computeFieldValues(model, field);
return (
<NumericInputWithDefault
value={modelValue}
defaultValue={defaultValue}
onValueChange={(valueAsNumber: number, valueAsString: string) => {
let newValue: number | undefined;
if (valueAsString !== '' && !isNaN(valueAsNumber)) {
newValue = valueAsNumber === 0 && field.zeroMeansUndefined ? undefined : valueAsNumber;
}
this.fieldChange(field, newValue);
<FancyNumericInput
value={toNumberOrUndefined(modelValue)}
defaultValue={toNumberOrUndefined(defaultValue)}
onValueChange={valueAsNumber => {
this.fieldChange(
field,
valueAsNumber === 0 && field.zeroMeansUndefined ? undefined : valueAsNumber,
);
}}
onBlur={e => {
if (e.target.value === '') {
@ -265,7 +274,7 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
}
if (onFinalize) onFinalize();
}}
min={field.min || 0}
min={field.min ?? 0}
max={field.max}
fill
large={large}
@ -276,6 +285,40 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
);
}
/**
 * Renders the input for a 'ratio'-type field: a number expected to lie in
 * [0, 1] (unless the field overrides min/max), using much finer step sizes
 * than the plain number input.
 */
private renderRatioInput(field: Field<T>): JSX.Element {
  const { model, large, onFinalize } = this.props;
  const { required, defaultValue, modelValue } = AutoForm.computeFieldValues(model, field);

  return (
    <FancyNumericInput
      value={toNumberOrUndefined(modelValue)}
      defaultValue={toNumberOrUndefined(defaultValue)}
      onValueChange={valueAsNumber => {
        // A value of 0 unsets the field when the field opts into zeroMeansUndefined.
        this.fieldChange(
          field,
          valueAsNumber === 0 && field.zeroMeansUndefined ? undefined : valueAsNumber,
        );
      }}
      onBlur={e => {
        // Clearing the text entirely unsets the field on blur.
        if (e.target.value === '') {
          this.fieldChange(field, undefined);
        }
        if (onFinalize) onFinalize();
      }}
      min={field.min ?? 0}
      max={field.max ?? 1}
      minorStepSize={0.001}
      stepSize={0.01}
      majorStepSize={0.05}
      fill
      large={large}
      disabled={AutoForm.evaluateFunctor(field.disabled, model, false)}
      placeholder={AutoForm.evaluateFunctor(field.placeholder, model, '')}
      intent={required && modelValue == null ? AutoForm.REQUIRED_INTENT : undefined}
    />
  );
}
private renderSizeBytesInput(field: Field<T>): JSX.Element {
const { model, large, onFinalize } = this.props;
const { required, defaultValue, modelValue } = AutoForm.computeFieldValues(model, field);
@ -445,6 +488,8 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
switch (field.type) {
case 'number':
return this.renderNumberInput(field);
case 'ratio':
return this.renderRatioInput(field);
case 'size-bytes':
return this.renderSizeBytesInput(field);
case 'string':
@ -510,7 +555,7 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
);
}
render(): JSX.Element {
render() {
const { fields, model, showCustom } = this.props;
const { showMore, customDialog } = this.state;

View File

@ -18,6 +18,7 @@
import classNames from 'classnames';
import { max } from 'd3-array';
import type { JSX } from 'react';
import React, { Fragment } from 'react';
import './braced-text.scss';

View File

@ -18,8 +18,8 @@
import { Menu, MenuItem } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import type { Column, SqlExpression, SqlQuery } from 'druid-query-toolkit';
import { C, L, SqlComparison, SqlLiteral, SqlRecord, trimString } from 'druid-query-toolkit';
import type { Column, SqlExpression, SqlQuery } from '@druid-toolkit/query';
import { C, L, SqlComparison, SqlLiteral, SqlRecord, trimString } from '@druid-toolkit/query';
import React from 'react';
import type { QueryAction } from '../../utils';

View File

@ -16,6 +16,7 @@
* limitations under the License.
*/
import type { JSX } from 'react';
import React from 'react';
export interface DeferredProps {

View File

@ -0,0 +1,32 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`FancyNumericInput matches snapshot 1`] = `
<Blueprint4.ControlGroup
className="fancy-numeric-input bp4-numeric-input"
>
<Blueprint4.InputGroup
autoComplete="off"
onBlur={[Function]}
onChange={[Function]}
onKeyDown={[Function]}
value="5"
/>
<Blueprint4.ButtonGroup
className="bp4-fixed"
vertical={true}
>
<Blueprint4.Button
aria-label="increment"
disabled={false}
icon="chevron-up"
onMouseDown={[Function]}
/>
<Blueprint4.Button
aria-label="decrement"
disabled={false}
icon="chevron-down"
onMouseDown={[Function]}
/>
</Blueprint4.ButtonGroup>
</Blueprint4.ControlGroup>
`;

View File

@ -20,11 +20,13 @@ import React from 'react';
import { shallow } from '../../utils/shallow-renderer';
import { NumericInputWithDefault } from './numeric-input-with-default';
import { FancyNumericInput } from './fancy-numeric-input';
describe('NumericInputWithDefault', () => {
describe('FancyNumericInput', () => {
it('matches snapshot', () => {
const numericInputWithDefault = shallow(<NumericInputWithDefault value={5} defaultValue={3} />);
const numericInputWithDefault = shallow(
<FancyNumericInput value={5} defaultValue={3} onValueChange={() => {}} />,
);
expect(numericInputWithDefault).toMatchSnapshot();
});

View File

@ -0,0 +1,224 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { InputGroupProps2, Intent } from '@blueprintjs/core';
import { Button, ButtonGroup, Classes, ControlGroup, InputGroup, Keys } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { SqlExpression, SqlFunction, SqlLiteral, SqlMulti } from '@druid-toolkit/query';
import classNames from 'classnames';
import React, { useEffect, useState } from 'react';
import { clamp } from '../../utils';
// Maps a SQL arithmetic operator to the reducer used when evaluating a
// SqlMulti expression. Division deliberately yields 0 (not Infinity/NaN)
// when the divisor is 0.
const MULTI_OP_TO_REDUCER: Record<string, (a: number, b: number) => number> = {
  '+': (a, b) => a + b,
  '-': (a, b) => a - b,
  '*': (a, b) => a * b,
  '/': (a, b) => (b ? a / b : 0),
};
/**
 * Best-effort evaluator for a parsed SQL expression made up of numeric
 * literals, the four basic arithmetic operators, and the PI() function.
 * Returns undefined for anything it cannot evaluate.
 */
function evaluateSqlSimple(sql: SqlExpression): number | undefined {
  if (sql instanceof SqlLiteral) {
    return sql.getNumberValue();
  }
  if (sql instanceof SqlMulti) {
    const reducer = MULTI_OP_TO_REDUCER[sql.op];
    if (!reducer) return;
    const values: number[] = [];
    for (const arg of sql.getArgArray()) {
      const argValue = evaluateSqlSimple(arg);
      if (typeof argValue === 'undefined') return;
      values.push(argValue);
    }
    return values.reduce(reducer);
  }
  if (sql instanceof SqlFunction && sql.getEffectiveFunctionName() === 'PI') {
    return Math.PI;
  }
  return;
}
/** Formats a number for display in the input; an absent value shows as an empty field. */
function numberToShown(n: number | undefined): string {
  return n === undefined ? '' : String(n);
}
/**
 * Parses the shown text as a SQL expression and evaluates it to a number.
 * Returns undefined when the text does not parse or cannot be evaluated.
 */
function shownToNumber(s: string): number | undefined {
  const parsed = SqlExpression.maybeParse(s);
  return parsed ? evaluateSqlSimple(parsed) : undefined;
}
export interface FancyNumericInputProps {
  className?: string;
  intent?: Intent;
  fill?: boolean;
  large?: boolean;
  small?: boolean;
  disabled?: boolean;
  readOnly?: boolean;
  placeholder?: string;
  onBlur?: InputGroupProps2['onBlur'];

  // Controlled value; when undefined the input falls back to defaultValue.
  value: number | undefined;
  defaultValue?: number;
  onValueChange(value: number): void;

  // Inclusive bounds the value is clamped into (see roundAndClamp).
  min?: number;
  max?: number;

  // Step used with alt (minor), no modifier (stepSize), and shift (major).
  // Defaults: stepSize 1, minorStepSize = stepSize, majorStepSize = stepSize * 10.
  minorStepSize?: number;
  stepSize?: number;
  majorStepSize?: number;
}
/**
 * A numeric input that accepts simple SQL-style arithmetic expressions
 * (e.g. `3 * 1000`, `PI / 4`) and evaluates them to a number as you type.
 * Supports stepping via the chevron buttons and arrow keys (shift = major
 * step, alt = minor step); Enter snaps the shown text to the rounded,
 * clamped value.
 */
export const FancyNumericInput = React.memo(function FancyNumericInput(
  props: FancyNumericInputProps,
) {
  const {
    className,
    intent,
    fill,
    large,
    small,
    disabled,
    readOnly,
    placeholder,
    onBlur,
    value,
    defaultValue,
    onValueChange,
    min,
    max,
  } = props;

  const stepSize = props.stepSize || 1;
  const minorStepSize = props.minorStepSize || stepSize;
  const majorStepSize = props.majorStepSize || stepSize * 10;

  // Round down to the minor-step resolution, then clamp into [min, max].
  function roundAndClamp(n: number): number {
    const inv = 1 / minorStepSize;
    return clamp(Math.floor(n * inv) / inv, min, max);
  }

  const effectiveValue = value ?? defaultValue;
  const [shownValue, setShownValue] = useState<string>(numberToShown(effectiveValue));

  const shownNumberRaw = shownToNumber(shownValue);
  // Must use an explicit type check here: the previous truthiness check
  // (`shownNumberRaw ? ...`) incorrectly treated a typed-in 0 as "no number",
  // breaking Enter-snapping and forcing a spurious re-sync when the value is 0.
  const shownNumberClamped =
    typeof shownNumberRaw === 'number' ? roundAndClamp(shownNumberRaw) : undefined;

  useEffect(() => {
    // Re-sync the shown text when the controlled value changes externally.
    if (effectiveValue !== shownNumberClamped) {
      setShownValue(numberToShown(effectiveValue));
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [effectiveValue]);

  const containerClasses = classNames(
    'fancy-numeric-input',
    Classes.NUMERIC_INPUT,
    { [Classes.LARGE]: large, [Classes.SMALL]: small },
    className,
  );

  const effectiveDisabled = disabled || readOnly;
  const isIncrementDisabled = max !== undefined && value !== undefined && +value >= max;
  const isDecrementDisabled = min !== undefined && value !== undefined && +value <= min;

  function changeValue(newValue: number): void {
    onValueChange(roundAndClamp(newValue));
  }

  function increment(delta: number): void {
    if (typeof shownNumberRaw !== 'number') return;
    changeValue(shownNumberRaw + delta);
  }

  function getIncrementSize(isShiftKeyPressed: boolean, isAltKeyPressed: boolean): number {
    if (isShiftKeyPressed) {
      return majorStepSize;
    }
    if (isAltKeyPressed) {
      return minorStepSize;
    }
    return stepSize;
  }

  return (
    <ControlGroup className={containerClasses} fill={fill}>
      <InputGroup
        autoComplete="off"
        aria-valuemax={max}
        aria-valuemin={min}
        small={small}
        large={large}
        placeholder={placeholder}
        value={shownValue}
        onChange={e => {
          const valueAsString = (e.target as HTMLInputElement).value;
          setShownValue(valueAsString);

          const shownNumber = shownToNumber(valueAsString);
          if (typeof shownNumber === 'number') {
            changeValue(shownNumber);
          }
        }}
        onBlur={e => {
          setShownValue(numberToShown(effectiveValue));
          onBlur?.(e);
        }}
        onKeyDown={e => {
          const { keyCode } = e;
          if (keyCode === Keys.ENTER && typeof shownNumberClamped === 'number') {
            setShownValue(numberToShown(shownNumberClamped));
            return;
          }

          let direction = 0;
          if (keyCode === Keys.ARROW_UP) {
            direction = 1;
          } else if (keyCode === Keys.ARROW_DOWN) {
            direction = -1;
          }

          if (direction) {
            // when the input field has focus, some key combinations will modify
            // the field's selection range. we'll actually want to select all
            // text in the field after we modify the value on the following
            // lines. preventing the default selection behavior lets us do that
            // without interference.
            e.preventDefault();
            increment(direction * getIncrementSize(e.shiftKey, e.altKey));
          }
        }}
      />
      <ButtonGroup className={Classes.FIXED} vertical>
        <Button
          aria-label="increment"
          disabled={effectiveDisabled || isIncrementDisabled}
          icon={IconNames.CHEVRON_UP}
          intent={intent}
          onMouseDown={e => increment(getIncrementSize(e.shiftKey, e.altKey))}
        />
        <Button
          aria-label="decrement"
          disabled={effectiveDisabled || isDecrementDisabled}
          icon={IconNames.CHEVRON_DOWN}
          intent={intent}
          onMouseDown={e => increment(-getIncrementSize(e.shiftKey, e.altKey))}
        />
      </ButtonGroup>
    </ControlGroup>
  );
});

View File

@ -19,6 +19,7 @@
import { FormGroup, Icon } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { Popover2 } from '@blueprintjs/popover2';
import type { JSX } from 'react';
import React from 'react';
import './form-group-with-info.scss';

View File

@ -239,6 +239,17 @@ exports[`HeaderBar matches snapshot 1`] = `
shouldDismissPopover={true}
text="Overlord dynamic config"
/>
<Blueprint4.MenuItem
active={false}
disabled={false}
icon="compressed"
multiline={false}
onClick={[Function]}
popoverProps={Object {}}
selected={false}
shouldDismissPopover={true}
text="Compaction dynamic config"
/>
<Blueprint4.MenuDivider />
<Blueprint4.MenuItem
active={false}

View File

@ -32,10 +32,12 @@ import {
} from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { Popover2 } from '@blueprintjs/popover2';
import type { JSX } from 'react';
import React, { useState } from 'react';
import {
AboutDialog,
CompactionDynamicConfigDialog,
CoordinatorDynamicConfigDialog,
DoctorDialog,
OverlordDynamicConfigDialog,
@ -239,6 +241,7 @@ export const HeaderBar = React.memo(function HeaderBar(props: HeaderBarProps) {
const [coordinatorDynamicConfigDialogOpen, setCoordinatorDynamicConfigDialogOpen] =
useState(false);
const [overlordDynamicConfigDialogOpen, setOverlordDynamicConfigDialogOpen] = useState(false);
const [compactionDynamicConfigDialogOpen, setCompactionDynamicConfigDialogOpen] = useState(false);
const showSplitDataLoaderMenu = capabilities.hasMultiStageQuery();
@ -341,6 +344,12 @@ export const HeaderBar = React.memo(function HeaderBar(props: HeaderBarProps) {
onClick={() => setOverlordDynamicConfigDialogOpen(true)}
disabled={!capabilities.hasOverlordAccess()}
/>
<MenuItem
icon={IconNames.COMPRESSED}
text="Compaction dynamic config"
onClick={() => setCompactionDynamicConfigDialogOpen(true)}
disabled={!capabilities.hasCoordinatorAccess()}
/>
<MenuDivider />
<MenuItem icon={IconNames.COG} text="Console options">
@ -494,6 +503,11 @@ export const HeaderBar = React.memo(function HeaderBar(props: HeaderBarProps) {
{overlordDynamicConfigDialogOpen && (
<OverlordDynamicConfigDialog onClose={() => setOverlordDynamicConfigDialogOpen(false)} />
)}
{compactionDynamicConfigDialogOpen && (
<CompactionDynamicConfigDialog
onClose={() => setCompactionDynamicConfigDialogOpen(false)}
/>
)}
</Navbar>
);
});

View File

@ -16,29 +16,50 @@
* limitations under the License.
*/
import type { JSX } from 'react';
import React from 'react';
import './highlight-text.scss';
export interface HighlightTextProps {
text: string;
find: string;
replace: string | JSX.Element;
find: string | RegExp;
replace: string | JSX.Element | ((found: string) => string | JSX.Element);
}
export const HighlightText = React.memo(function HighlightText(props: HighlightTextProps) {
const { text, find, replace } = props;
const startIndex = text.indexOf(find);
let startIndex = -1;
let found = '';
if (typeof find === 'string') {
startIndex = text.indexOf(find);
if (startIndex !== -1) {
found = find;
}
} else {
const m = find.exec(text);
if (m) {
startIndex = m.index;
found = m[0];
}
}
if (startIndex === -1) return <span className="highlight-text">text</span>;
const endIndex = startIndex + find.length;
const endIndex = startIndex + found.length;
const pre = text.substring(0, startIndex);
const post = text.substring(endIndex);
const replaceValue = typeof replace === 'function' ? replace(found) : replace;
return (
<span className="highlight-text">
{Boolean(pre) && <span className="pre">{text.substring(0, startIndex)}</span>}
{typeof replace === 'string' ? <span className="highlighted">{replace}</span> : replace}
{typeof replaceValue === 'string' ? (
<span className="highlighted">{replaceValue}</span>
) : (
replaceValue
)}
{Boolean(post) && <span className="post">{text.substring(endIndex)}</span>}
</span>
);

View File

@ -1,20 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`NumericInputWithDefault matches snapshot 1`] = `
<Blueprint4.NumericInput
allowNumericCharactersOnly={true}
buttonPosition="right"
clampValueOnBlur={false}
defaultValue=""
large={false}
majorStepSize={10}
minorStepSize={0.1}
onBlur={[Function]}
onValueChange={[Function]}
selectAllOnFocus={false}
selectAllOnIncrement={false}
small={false}
stepSize={1}
value={5}
/>
`;

View File

@ -1,50 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { HTMLInputProps, NumericInputProps } from '@blueprintjs/core';
import { NumericInput } from '@blueprintjs/core';
import React, { useState } from 'react';
export type NumericInputWithDefaultProps = HTMLInputProps & NumericInputProps;

/**
 * A NumericInput wrapper that displays `defaultValue` whenever `value` is
 * unset — except while the user is actively editing, in which case it shows
 * an empty field. The "has changed" flag resets on blur so the default
 * reappears once editing ends.
 */
export const NumericInputWithDefault = React.memo(function NumericInputWithDefault(
  props: NumericInputWithDefaultProps,
) {
  const { value, defaultValue, onValueChange, onBlur, ...rest } = props;
  const [hasChanged, setHasChanged] = useState(false);

  let effectiveValue = value;
  if (effectiveValue == null) {
    // Mid-edit: show empty; otherwise fall back to defaultValue when provided.
    effectiveValue = hasChanged ? '' : typeof defaultValue !== 'undefined' ? defaultValue : '';
  }

  return (
    <NumericInput
      value={effectiveValue}
      onValueChange={(valueAsNumber, valueAsString, inputElement) => {
        setHasChanged(true);
        onValueChange?.(valueAsNumber, valueAsString, inputElement);
      }}
      onBlur={e => {
        // Editing session is over: let the default show again.
        setHasChanged(false);
        onBlur?.(e);
      }}
      {...rest}
    />
  );
});

View File

@ -16,6 +16,7 @@
* limitations under the License.
*/
import type { JSX } from 'react';
import React, { useState } from 'react';
import type { DruidError, RowColumn } from '../../utils';
@ -61,22 +62,26 @@ export const QueryErrorPane = React.memo(function QueryErrorPane(props: QueryErr
return (
<div className="query-error-pane">
{suggestionElement}
{error.error && <p>{`Error: ${error.error}`}</p>}
{error.error && (
<p>{`Error: ${error.category}${
error.persona && error.persona !== 'USER' ? ` (${error.persona})` : ''
}`}</p>
)}
{error.errorMessageWithoutExpectation && (
<p>
{position ? (
<HighlightText
text={error.errorMessageWithoutExpectation}
find={position.match}
replace={
find={/\(line \[\d+], column \[\d+]\)/}
replace={found => (
<a
onClick={() => {
moveCursorTo(position);
}}
>
{position.match}
{found}
</a>
}
)}
/>
) : (
error.errorMessageWithoutExpectation

View File

@ -19,8 +19,8 @@
import { Button, Icon } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { Popover2 } from '@blueprintjs/popover2';
import type { Column, QueryResult } from '@druid-toolkit/query';
import classNames from 'classnames';
import type { Column, QueryResult } from 'druid-query-toolkit';
import React, { useEffect, useState } from 'react';
import type { RowRenderProps } from 'react-table';
import ReactTable from 'react-table';

View File

@ -33,13 +33,11 @@ const DELAYS: DelayLabel[] = [
];
export interface RefreshButtonProps {
onRefresh: (auto: boolean) => void;
onRefresh(auto: boolean): void;
localStorageKey?: LocalStorageKeys;
}
export const RefreshButton = React.memo(function RefreshButton(props: RefreshButtonProps) {
const { onRefresh, localStorageKey } = props;
return (
<TimedButton
className="refresh-button"
@ -48,9 +46,8 @@ export const RefreshButton = React.memo(function RefreshButton(props: RefreshBut
delays={DELAYS}
icon={IconNames.REFRESH}
text="Refresh"
onRefresh={onRefresh}
foregroundOnly
localStorageKey={localStorageKey}
{...props}
/>
);
});

View File

@ -39,7 +39,7 @@ export class BarGroup extends React.Component<BarGroupProps> {
return nextProps.hoverOn === this.props.hoverOn;
}
render(): JSX.Element[] | null {
render() {
const { dataToRender, changeActiveDatasource, xScale, yScale, onHoverBar, barWidth } =
this.props;
if (dataToRender === undefined) return null;

View File

@ -16,8 +16,8 @@
* limitations under the License.
*/
import { sane } from '@druid-toolkit/query';
import { render } from '@testing-library/react';
import { sane } from 'druid-query-toolkit';
import React from 'react';
import { Capabilities } from '../../helpers';

View File

@ -525,7 +525,7 @@ ORDER BY "start" DESC`;
);
}
render(): JSX.Element {
render() {
const { capabilities } = this.props;
const { datasources, activeDataType, activeDatasource, startDate, endDate } = this.state;

View File

@ -137,7 +137,7 @@ export class ShowLog extends React.PureComponent<ShowLogProps, ShowLogState> {
}
};
render(): JSX.Element {
render() {
const { endpoint, downloadFilename, tail } = this.props;
const { logState } = this.state;

View File

@ -57,13 +57,13 @@ export const TableCell = React.memo(function TableCell(props: TableCellProps) {
const { value, unlimited } = props;
const [showValue, setShowValue] = useState<string | undefined>();
function renderShowValueDialog(): JSX.Element | undefined {
function renderShowValueDialog() {
if (!showValue) return;
return <ShowValueDialog onClose={() => setShowValue(undefined)} str={showValue} />;
}
function renderTruncated(str: string): JSX.Element {
function renderTruncated(str: string) {
if (str.length <= MAX_CHARS_TO_SHOW) {
return <div className="table-cell plain">{str}</div>;
}

View File

@ -414,7 +414,7 @@ export class ConsoleApplication extends React.PureComponent<
);
};
render(): JSX.Element {
render() {
const { capabilities, capabilitiesLoading } = this.state;
if (capabilitiesLoading) {

View File

@ -0,0 +1,164 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Button, Classes, Code, Dialog, Intent } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import React, { useState } from 'react';
import type { Field } from '../../components';
import { AutoForm, ExternalLink, Loader } from '../../components';
import { useQueryManager } from '../../hooks';
import { getLink } from '../../links';
import { Api, AppToaster } from '../../singletons';
import { getDruidErrorMessage } from '../../utils';
/** Shape of the coordinator's compaction dynamic config edited by this dialog. */
interface CompactionDynamicConfig {
  compactionTaskSlotRatio: number;
  maxCompactionTaskSlots: number;
}

// Defaults applied when the server response omits a value, and when posting.
const DEFAULT_RATIO = 0.1;
const DEFAULT_MAX = 2147483647; // 2^31 - 1, effectively unlimited

const COMPACTION_DYNAMIC_CONFIG_FIELDS: Field<CompactionDynamicConfig>[] = [
  {
    name: 'compactionTaskSlotRatio',
    type: 'ratio',
    defaultValue: DEFAULT_RATIO,
    info: <>The ratio of the total task slots to the compaction task slots.</>,
  },
  {
    name: 'maxCompactionTaskSlots',
    type: 'number',
    defaultValue: DEFAULT_MAX,
    info: <>The maximum number of task slots for compaction tasks</>,
    min: 1,
  },
];

export interface CompactionDynamicConfigDialogProps {
  onClose(): void;
}
/**
 * Dialog for viewing and editing the coordinator's compaction dynamic config
 * (task slot ratio and max task slots). Loads the current config on mount and
 * saves via the coordinator's taskslots endpoint.
 */
export const CompactionDynamicConfigDialog = React.memo(function CompactionDynamicConfigDialog(
  props: CompactionDynamicConfigDialogProps,
) {
  const { onClose } = props;
  const [dynamicConfig, setDynamicConfig] = useState<
    Partial<CompactionDynamicConfig> | undefined
  >();

  useQueryManager<null, Record<string, any>>({
    initQuery: null,
    processQuery: async () => {
      try {
        const c = (await Api.instance.get('/druid/coordinator/v1/config/compaction')).data;
        setDynamicConfig({
          compactionTaskSlotRatio: c.compactionTaskSlotRatio ?? DEFAULT_RATIO,
          maxCompactionTaskSlots: c.maxCompactionTaskSlots ?? DEFAULT_MAX,
        });
      } catch (e) {
        // Without a loaded config the dialog is useless; report and bail out.
        AppToaster.show({
          icon: IconNames.ERROR,
          intent: Intent.DANGER,
          message: `Could not load compaction dynamic config: ${getDruidErrorMessage(e)}`,
        });
        onClose();
      }
      return {};
    },
  });

  async function saveConfig() {
    if (!dynamicConfig) return;
    try {
      // This API is terrible. https://druid.apache.org/docs/latest/operations/api-reference.html#automatic-compaction-configuration
      await Api.instance.post(
        `/druid/coordinator/v1/config/compaction/taskslots?ratio=${
          dynamicConfig.compactionTaskSlotRatio ?? DEFAULT_RATIO
        }&max=${dynamicConfig.maxCompactionTaskSlots ?? DEFAULT_MAX}`,
        {},
      );
    } catch (e) {
      AppToaster.show({
        icon: IconNames.ERROR,
        intent: Intent.DANGER,
        message: `Could not save compaction dynamic config: ${getDruidErrorMessage(e)}`,
      });
      // Bug fix: previously this fell through to the success toast and closed
      // the dialog even though the save failed.
      return;
    }

    AppToaster.show({
      message: 'Saved compaction dynamic config',
      intent: Intent.SUCCESS,
    });
    onClose();
  }

  return (
    <Dialog
      className="compaction-dynamic-config-dialog"
      onClose={onClose}
      title="Compaction dynamic config"
      isOpen
    >
      {dynamicConfig ? (
        <>
          <div className={Classes.DIALOG_BODY}>
            <p>
              Edit the compaction dynamic configuration on the fly. For more information please
              refer to the{' '}
              <ExternalLink
                href={`${getLink(
                  'DOCS',
                )}/operations/api-reference.html#automatic-compaction-configuration`}
              >
                documentation
              </ExternalLink>
              .
            </p>
            <p>
              The maximum number of task slots used for compaction will be{' '}
              <Code>{`clamp(floor(${
                dynamicConfig.compactionTaskSlotRatio ?? DEFAULT_RATIO
              } * total_task_slots), 1, ${
                dynamicConfig.maxCompactionTaskSlots ?? DEFAULT_MAX
              })`}</Code>
              .
            </p>
            <AutoForm
              fields={COMPACTION_DYNAMIC_CONFIG_FIELDS}
              model={dynamicConfig}
              onChange={setDynamicConfig}
            />
          </div>
          <div className={Classes.DIALOG_FOOTER}>
            <div className={Classes.DIALOG_FOOTER_ACTIONS}>
              <Button
                text="Save"
                onClick={() => void saveConfig()}
                intent={Intent.PRIMARY}
                rightIcon={IconNames.TICK}
              />
            </div>
          </div>
        </>
      ) : (
        <Loader />
      )}
    </Dialog>
  );
});

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import { L } from 'druid-query-toolkit';
import { L } from '@druid-toolkit/query';
import React from 'react';
import ReactTable from 'react-table';

View File

@ -16,8 +16,8 @@
* limitations under the License.
*/
import type { QueryResult } from 'druid-query-toolkit';
import { QueryRunner, T } from 'druid-query-toolkit';
import type { QueryResult } from '@druid-toolkit/query';
import { QueryRunner, T } from '@druid-toolkit/query';
import React from 'react';
import { Loader, RecordTablePane } from '../../../components';

View File

@ -175,7 +175,7 @@ export class DoctorDialog extends React.PureComponent<DoctorDialogProps, DoctorD
}
}
render(): JSX.Element {
render() {
const { onClose } = this.props;
return (

View File

@ -20,6 +20,7 @@ import { Button, Classes, Dialog, Tab, Tabs } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import classNames from 'classnames';
import * as JSONBig from 'json-bigint-native';
import type { JSX } from 'react';
import React, { useState } from 'react';
import { ShowValue } from '../../components';

View File

@ -20,6 +20,7 @@ export * from './about-dialog/about-dialog';
export * from './alert-dialog/alert-dialog';
export * from './async-action-dialog/async-action-dialog';
export * from './compaction-config-dialog/compaction-config-dialog';
export * from './compaction-dynamic-config-dialog/compaction-dynamic-config-dialog';
export * from './coordinator-dynamic-config-dialog/coordinator-dynamic-config-dialog';
export * from './diff-dialog/diff-dialog';
export * from './doctor-dialog/doctor-dialog';

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import { N } from 'druid-query-toolkit';
import { N } from '@druid-toolkit/query';
import React from 'react';
import ReactTable from 'react-table';

View File

@ -17,6 +17,7 @@
*/
import { Button, Classes, Dialog, Intent, NumericInput } from '@blueprintjs/core';
import type { JSX } from 'react';
import React, { useState } from 'react';
const DEFAULT_MIN_VALUE = 1;

View File

@ -16,8 +16,8 @@
* limitations under the License.
*/
import type { QueryResult } from 'druid-query-toolkit';
import { QueryRunner } from 'druid-query-toolkit';
import type { QueryResult } from '@druid-toolkit/query';
import { QueryRunner } from '@druid-toolkit/query';
import React from 'react';
import { Loader, RecordTablePane } from '../../../components';

View File

@ -112,7 +112,7 @@ export class SnitchDialog extends React.PureComponent<SnitchDialogProps, SnitchD
);
}
renderHistoryDialog(): JSX.Element | null {
renderHistoryDialog() {
const { title, historyRecords } = this.props;
if (!historyRecords) return null;
@ -155,7 +155,7 @@ export class SnitchDialog extends React.PureComponent<SnitchDialogProps, SnitchD
disabled={saveDisabled}
text="Save"
onClick={this.save}
intent={Intent.PRIMARY as any}
intent={Intent.PRIMARY}
rightIcon={IconNames.TICK}
/>
) : (
@ -163,7 +163,7 @@ export class SnitchDialog extends React.PureComponent<SnitchDialogProps, SnitchD
disabled={saveDisabled}
text="Next"
onClick={this.goToFinalStep}
intent={Intent.PRIMARY as any}
intent={Intent.PRIMARY}
rightIcon={IconNames.ARROW_RIGHT}
/>
)}
@ -171,7 +171,7 @@ export class SnitchDialog extends React.PureComponent<SnitchDialogProps, SnitchD
);
}
render(): JSX.Element | null {
render() {
const { children, saveDisabled } = this.props;
const { showFinalStep, showHistory } = this.state;

View File

@ -55,7 +55,7 @@ export const StatusDialog = React.memo(function StatusDialog(props: StatusDialog
},
});
function renderContent(): JSX.Element | undefined {
function renderContent() {
if (responseState.loading) return <Loader />;
if (responseState.error) {

View File

@ -0,0 +1,97 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { AsyncStatusResponse } from './async-query';
/*
SELECT
"channel",
COUNT(*) AS "Count"
FROM "wikipedia"
GROUP BY 1
ORDER BY 2 DESC
LIMIT 2
*/
// Sample successful async-status response (for the query shown in the comment
// above). Includes the result schema, sample records, and the `pages` info
// added to the SqlStatementResource API.
export const SUCCESS_ASYNC_STATUS: AsyncStatusResponse = {
  queryId: 'query-ad84d20a-c331-4ee9-ac59-83024e369cf1',
  state: 'SUCCESS',
  createdAt: '2023-07-05T21:33:19.147Z',
  schema: [
    {
      name: 'channel',
      type: 'VARCHAR',
      nativeType: 'STRING',
    },
    {
      name: 'Count',
      type: 'BIGINT',
      nativeType: 'LONG',
    },
  ],
  durationMs: 29168,
  result: {
    numTotalRows: 2,
    totalSizeInBytes: 116,
    dataSource: '__query_select',
    sampleRecords: [
      ['#en.wikipedia', 6650],
      ['#sh.wikipedia', 3969],
    ],
    pages: [
      {
        numRows: 2,
        sizeInBytes: 116,
        id: 0,
      },
    ],
  },
};
/*
REPLACE INTO "k" OVERWRITE ALL
WITH "ext" AS (SELECT *
FROM TABLE(
EXTERN(
'{"type":"local","filter":"blah.json_","baseDir":"/"}',
'{"type":"json"}'
)
) EXTEND ("timestamp" VARCHAR, "session" VARCHAR))
SELECT
TIME_PARSE("timestamp") AS "__time",
"session"
FROM "ext"
PARTITIONED BY DAY
*/
// Sample failed async-status response (for the REPLACE query shown in the
// comment above). Carries the structured `errorDetails` payload instead of a
// result.
export const FAILED_ASYNC_STATUS: AsyncStatusResponse = {
  queryId: 'query-36ea273a-bd6d-48de-b890-2d853d879bf8',
  state: 'FAILED',
  createdAt: '2023-07-05T21:40:39.986Z',
  durationMs: 11217,
  errorDetails: {
    error: 'druidException',
    errorCode: 'UnknownError',
    persona: 'USER',
    category: 'UNCATEGORIZED',
    errorMessage: 'java.io.UncheckedIOException: /',
    context: {
      message: 'java.io.UncheckedIOException: /',
    },
  },
};

View File

@ -0,0 +1,37 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { ErrorResponse } from '../../utils';
/** Lifecycle states reported for an async (SQL statement) query. */
export type AsyncState = 'ACCEPTED' | 'RUNNING' | 'SUCCESS' | 'FAILED';

/** Status payload returned for an async query. */
export interface AsyncStatusResponse {
  queryId: string;
  state: AsyncState;
  // ISO timestamp string of when the query was created.
  createdAt: string;
  durationMs: number;
  // Result column descriptions; present once the query has a result schema.
  schema?: { name: string; type: string; nativeType: string }[];
  // Present when state is SUCCESS.
  result?: {
    dataSource: string;
    sampleRecords: any[][];
    numTotalRows: number;
    totalSizeInBytes: number;
    // Page descriptors (see mock data: { id, numRows, sizeInBytes }) — kept as
    // any[] here; TODO(review): consider a typed PageInformation interface.
    pages: any[];
  };
  // Present when state is FAILED.
  errorDetails?: ErrorResponse;
}

View File

@ -20,7 +20,7 @@ import { Code } from '@blueprintjs/core';
import React from 'react';
import type { Field } from '../../components';
import { deepGet, deepSet, oneOf } from '../../utils';
import { deepGet, deepSet, oneOfKnown } from '../../utils';
export interface CompactionConfig {
dataSource: string;
@ -43,6 +43,7 @@ export function compactionConfigHasLegacyInputSegmentSizeBytesSet(
);
}
const KNOWN_PARTITION_TYPES = ['dynamic', 'hashed', 'single_dim', 'range'];
export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
{
name: 'skipOffsetFromLatest',
@ -74,14 +75,16 @@ export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
name: 'tuningConfig.partitionsSpec.maxRowsPerSegment',
type: 'number',
defaultValue: 5000000,
defined: t => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'dynamic',
defined: t =>
oneOfKnown(deepGet(t, 'tuningConfig.partitionsSpec.type'), KNOWN_PARTITION_TYPES, 'dynamic'),
info: <>Determines how many rows are in each segment.</>,
},
{
name: 'tuningConfig.partitionsSpec.maxTotalRows',
type: 'number',
defaultValue: 20000000,
defined: t => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'dynamic',
defined: t =>
oneOfKnown(deepGet(t, 'tuningConfig.partitionsSpec.type'), KNOWN_PARTITION_TYPES, 'dynamic'),
info: <>Total number of rows in segments waiting for being pushed.</>,
},
// partitionsSpec type: hashed
@ -91,7 +94,7 @@ export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
zeroMeansUndefined: true,
placeholder: `(defaults to 500000)`,
defined: t =>
deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
oneOfKnown(deepGet(t, 'tuningConfig.partitionsSpec.type'), KNOWN_PARTITION_TYPES, 'hashed') &&
!deepGet(t, 'tuningConfig.partitionsSpec.numShards') &&
!deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
info: (
@ -121,7 +124,7 @@ export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
type: 'number',
zeroMeansUndefined: true,
defined: t =>
deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
oneOfKnown(deepGet(t, 'tuningConfig.partitionsSpec.type'), KNOWN_PARTITION_TYPES, 'hashed') &&
!deepGet(t, 'tuningConfig.partitionsSpec.numShards') &&
!deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
info: (
@ -150,7 +153,7 @@ export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
type: 'number',
zeroMeansUndefined: true,
defined: t =>
deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
oneOfKnown(deepGet(t, 'tuningConfig.partitionsSpec.type'), KNOWN_PARTITION_TYPES, 'hashed') &&
!deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment') &&
!deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
info: (
@ -176,21 +179,28 @@ export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
name: 'tuningConfig.partitionsSpec.partitionDimensions',
type: 'string-array',
placeholder: '(all dimensions)',
defined: t => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed',
defined: t =>
oneOfKnown(deepGet(t, 'tuningConfig.partitionsSpec.type'), KNOWN_PARTITION_TYPES, 'hashed'),
info: <p>The dimensions to partition on. Leave blank to select all dimensions.</p>,
},
// partitionsSpec type: single_dim, range
{
name: 'tuningConfig.partitionsSpec.partitionDimension',
type: 'string',
defined: t => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim',
defined: t =>
oneOfKnown(
deepGet(t, 'tuningConfig.partitionsSpec.type'),
KNOWN_PARTITION_TYPES,
'single_dim',
),
required: true,
info: <p>The dimension to partition on.</p>,
},
{
name: 'tuningConfig.partitionsSpec.partitionDimensions',
type: 'string-array',
defined: t => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'range',
defined: t =>
oneOfKnown(deepGet(t, 'tuningConfig.partitionsSpec.type'), KNOWN_PARTITION_TYPES, 'range'),
required: true,
info: <p>The dimensions to partition on.</p>,
},
@ -199,8 +209,12 @@ export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
type: 'number',
zeroMeansUndefined: true,
defined: t =>
oneOf(deepGet(t, 'tuningConfig.partitionsSpec.type'), 'single_dim', 'range') &&
!deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
oneOfKnown(
deepGet(t, 'tuningConfig.partitionsSpec.type'),
KNOWN_PARTITION_TYPES,
'single_dim',
'range',
) && !deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
required: t =>
!deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment') &&
!deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
@ -222,8 +236,12 @@ export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
type: 'number',
zeroMeansUndefined: true,
defined: t =>
oneOf(deepGet(t, 'tuningConfig.partitionsSpec.type'), 'single_dim', 'range') &&
!deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
oneOfKnown(
deepGet(t, 'tuningConfig.partitionsSpec.type'),
KNOWN_PARTITION_TYPES,
'single_dim',
'range',
) && !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
required: t =>
!deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment') &&
!deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
@ -241,7 +259,13 @@ export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
name: 'tuningConfig.partitionsSpec.assumeGrouped',
type: 'boolean',
defaultValue: false,
defined: t => oneOf(deepGet(t, 'tuningConfig.partitionsSpec.type'), 'single_dim', 'range'),
defined: t =>
oneOfKnown(
deepGet(t, 'tuningConfig.partitionsSpec.type'),
KNOWN_PARTITION_TYPES,
'single_dim',
'range',
),
info: (
<p>
Assume that input data has already been grouped on time and dimensions. Ingestion will run
@ -287,7 +311,13 @@ export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
defaultValue: 10,
min: 1,
defined: t =>
oneOf(deepGet(t, 'tuningConfig.partitionsSpec.type'), 'hashed', 'single_dim', 'range'),
oneOfKnown(
deepGet(t, 'tuningConfig.partitionsSpec.type'),
KNOWN_PARTITION_TYPES,
'hashed',
'single_dim',
'range',
),
info: <>Maximum number of merge tasks which can be run at the same time.</>,
},
{

View File

@ -17,7 +17,7 @@
*/
import type { Field } from '../../components';
import { filterMap, typeIs } from '../../utils';
import { filterMap, typeIsKnown } from '../../utils';
import type { SampleResponse } from '../../utils/sampler';
import { getHeaderNamesFromSampleResponse } from '../../utils/sampler';
import { guessColumnTypeFromSampleResponse } from '../ingestion-spec/ingestion-spec';
@ -37,6 +37,7 @@ export interface DimensionSpec {
readonly multiValueHandling?: string;
}
const KNOWN_TYPES = ['string', 'long', 'float', 'double', 'json'];
export const DIMENSION_SPEC_FIELDS: Field<DimensionSpec>[] = [
{
name: 'name',
@ -48,18 +49,18 @@ export const DIMENSION_SPEC_FIELDS: Field<DimensionSpec>[] = [
name: 'type',
type: 'string',
required: true,
suggestions: ['string', 'long', 'float', 'double', 'json'],
suggestions: KNOWN_TYPES,
},
{
name: 'createBitmapIndex',
type: 'boolean',
defined: typeIs('string'),
defined: typeIsKnown(KNOWN_TYPES, 'string'),
defaultValue: true,
},
{
name: 'multiValueHandling',
type: 'string',
defined: typeIs('string'),
defined: typeIsKnown(KNOWN_TYPES, 'string'),
defaultValue: 'SORTED_ARRAY',
suggestions: ['SORTED_ARRAY', 'SORTED_SET', 'ARRAY'],
},

View File

@ -40,115 +40,53 @@ PARTITIONED BY ALL TIME
}
*/
export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
{
task: 'query-b55f3432-7810-4529-80ed-780a926a6f03',
export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskReport({
multiStageQuery: {
type: 'multiStageQuery',
taskId: 'query-5aa683e2-a6ee-4655-a834-a643e91055b1',
payload: {
type: 'query_controller',
id: 'query-b55f3432-7810-4529-80ed-780a926a6f03',
spec: {
query: {
queryType: 'scan',
dataSource: {
type: 'external',
inputSource: {
type: 'http',
uris: ['https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz'],
},
inputFormat: {
type: 'json',
keepNullColumns: false,
assumeNewlineDelimited: false,
useJsonNodeReader: false,
},
signature: [
{ name: 'timestamp', type: 'STRING' },
{ name: 'agent_type', type: 'STRING' },
status: {
status: 'SUCCESS',
startTime: '2023-06-19T05:39:26.377Z',
durationMs: 23170,
pendingTasks: 0,
runningTasks: 2,
},
stages: [
{
stageNumber: 0,
definition: {
id: '8af42220-2724-4a76-b39f-c2f98df2de69_0',
input: [
{
type: 'external',
inputSource: {
type: 'http',
uris: ['https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz'],
},
inputFormat: {
type: 'json',
keepNullColumns: false,
assumeNewlineDelimited: false,
useJsonNodeReader: false,
},
signature: [
{
name: 'timestamp',
type: 'STRING',
},
{
name: 'agent_type',
type: 'STRING',
},
],
},
],
},
intervals: {
type: 'intervals',
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
virtualColumns: [
{
type: 'expression',
name: 'v0',
expression: 'timestamp_parse("timestamp",null,\'UTC\')',
outputType: 'LONG',
},
],
resultFormat: 'compactedList',
columns: ['agent_type', 'v0'],
legacy: false,
context: {
finalize: false,
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
maxNumTasks: 2,
queryId: 'b55f3432-7810-4529-80ed-780a926a6f03',
scanSignature: '[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '{"type":"all"}',
sqlQueryId: 'b55f3432-7810-4529-80ed-780a926a6f03',
sqlReplaceTimeChunks: 'all',
},
granularity: { type: 'all' },
},
columnMappings: [
{ queryColumn: 'v0', outputColumn: '__time' },
{ queryColumn: 'agent_type', outputColumn: 'agent_type' },
],
destination: {
type: 'dataSource',
dataSource: 'kttm_simple',
segmentGranularity: { type: 'all' },
replaceTimeChunks: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
assignmentStrategy: 'max',
tuningConfig: { maxNumWorkers: 1, maxRowsInMemory: 100000, rowsPerSegment: 3000000 },
},
sqlQuery:
'REPLACE INTO "kttm_simple" OVERWRITE ALL\nSELECT\n TIME_PARSE("timestamp") AS "__time",\n "agent_type"\nFROM TABLE(\n EXTERN(\n \'{"type":"http","uris":["https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz"]}\',\n \'{"type":"json"}\'\n )\n) EXTEND ("timestamp" VARCHAR, "agent_type" VARCHAR)\nPARTITIONED BY ALL TIME',
sqlQueryContext: {
finalizeAggregations: false,
maxParseExceptions: 0,
sqlQueryId: 'b55f3432-7810-4529-80ed-780a926a6f03',
groupByEnableMultiValueUnnesting: false,
sqlInsertSegmentGranularity: '{"type":"all"}',
maxNumTasks: 2,
sqlReplaceTimeChunks: 'all',
queryId: 'b55f3432-7810-4529-80ed-780a926a6f03',
},
sqlTypeNames: ['TIMESTAMP', 'VARCHAR'],
context: { forceTimeChunkLock: true, useLineageBasedSegmentAllocation: true },
groupId: 'query-b55f3432-7810-4529-80ed-780a926a6f03',
dataSource: 'kttm_simple',
resource: {
availabilityGroup: 'query-b55f3432-7810-4529-80ed-780a926a6f03',
requiredCapacity: 1,
},
},
},
{
multiStageQuery: {
type: 'multiStageQuery',
taskId: 'query-b55f3432-7810-4529-80ed-780a926a6f03',
payload: {
status: {
status: 'SUCCESS',
startTime: '2023-03-27T22:17:02.401Z',
durationMs: 28854,
pendingTasks: 0,
runningTasks: 2,
},
stages: [
{
stageNumber: 0,
definition: {
id: '8984a4c0-89a0-4a0a-9eaa-bf03088da3e3_0',
input: [
{
processor: {
type: 'scan',
query: {
queryType: 'scan',
dataSource: {
type: 'external',
inputSource: {
type: 'http',
@ -163,157 +101,333 @@ export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
useJsonNodeReader: false,
},
signature: [
{ name: 'timestamp', type: 'STRING' },
{ name: 'agent_type', type: 'STRING' },
],
},
],
processor: {
type: 'scan',
query: {
queryType: 'scan',
dataSource: { type: 'inputNumber', inputNumber: 0 },
intervals: {
type: 'intervals',
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
virtualColumns: [
{
type: 'expression',
name: 'v0',
expression: 'timestamp_parse("timestamp",null,\'UTC\')',
outputType: 'LONG',
name: 'timestamp',
type: 'STRING',
},
{
name: 'agent_type',
type: 'STRING',
},
],
resultFormat: 'compactedList',
columns: ['agent_type', 'v0'],
legacy: false,
context: {
__timeColumn: 'v0',
finalize: false,
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
maxNumTasks: 2,
queryId: 'b55f3432-7810-4529-80ed-780a926a6f03',
scanSignature:
'[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '{"type":"all"}',
sqlQueryId: 'b55f3432-7810-4529-80ed-780a926a6f03',
sqlReplaceTimeChunks: 'all',
},
granularity: { type: 'all' },
},
},
signature: [
{ name: '__boost', type: 'LONG' },
{ name: 'agent_type', type: 'STRING' },
{ name: 'v0', type: 'LONG' },
],
shuffleSpec: {
type: 'targetSize',
clusterBy: { columns: [{ columnName: '__boost', order: 'ASCENDING' }] },
targetSize: 3000000,
},
maxWorkerCount: 1,
shuffleCheckHasMultipleValues: true,
maxInputBytesPerWorker: 10737418240,
},
phase: 'FINISHED',
workerCount: 1,
partitionCount: 1,
startTime: '2023-03-27T22:17:02.792Z',
duration: 24236,
sort: true,
},
{
stageNumber: 1,
definition: {
id: '8984a4c0-89a0-4a0a-9eaa-bf03088da3e3_1',
input: [{ type: 'stage', stage: 0 }],
processor: {
type: 'segmentGenerator',
dataSchema: {
dataSource: 'kttm_simple',
timestampSpec: { column: '__time', format: 'millis', missingValue: null },
dimensionsSpec: {
dimensions: [
{
type: 'string',
name: 'agent_type',
multiValueHandling: 'SORTED_ARRAY',
createBitmapIndex: true,
},
],
dimensionExclusions: ['__time'],
includeAllDimensions: false,
useSchemaDiscovery: false,
},
metricsSpec: [],
granularitySpec: {
type: 'arbitrary',
queryGranularity: { type: 'none' },
rollup: false,
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
transformSpec: { filter: null, transforms: [] },
intervals: {
type: 'intervals',
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
columnMappings: [
{ queryColumn: 'v0', outputColumn: '__time' },
{ queryColumn: 'agent_type', outputColumn: 'agent_type' },
virtualColumns: [
{
type: 'expression',
name: 'v0',
expression: 'timestamp_parse("timestamp",null,\'UTC\')',
outputType: 'LONG',
},
],
tuningConfig: {
maxNumWorkers: 1,
maxRowsInMemory: 100000,
rowsPerSegment: 3000000,
resultFormat: 'compactedList',
columns: ['agent_type', 'v0'],
legacy: false,
context: {
__timeColumn: 'v0',
__user: 'allowAll',
finalize: false,
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
maxNumTasks: 2,
maxParseExceptions: 0,
queryId: '5aa683e2-a6ee-4655-a834-a643e91055b1',
scanSignature:
'[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '{"type":"all"}',
sqlQueryId: '5aa683e2-a6ee-4655-a834-a643e91055b1',
sqlReplaceTimeChunks: 'all',
},
granularity: {
type: 'all',
},
},
signature: [],
maxWorkerCount: 1,
maxInputBytesPerWorker: 10737418240,
},
phase: 'FINISHED',
workerCount: 1,
partitionCount: 1,
startTime: '2023-03-27T22:17:26.978Z',
duration: 4276,
signature: [
{
name: '__boost',
type: 'LONG',
},
{
name: 'agent_type',
type: 'STRING',
},
{
name: 'v0',
type: 'LONG',
},
],
shuffleSpec: {
type: 'targetSize',
clusterBy: {
columns: [
{
columnName: '__boost',
order: 'ASCENDING',
},
],
},
targetSize: 3000000,
},
maxWorkerCount: 1,
shuffleCheckHasMultipleValues: true,
},
],
counters: {
phase: 'FINISHED',
workerCount: 1,
partitionCount: 1,
startTime: '2023-06-19T05:39:26.711Z',
duration: 20483,
sort: true,
},
{
stageNumber: 1,
definition: {
id: '8af42220-2724-4a76-b39f-c2f98df2de69_1',
input: [
{
type: 'stage',
stage: 0,
},
],
processor: {
type: 'segmentGenerator',
dataSchema: {
dataSource: 'kttm_simple',
timestampSpec: {
column: '__time',
format: 'millis',
missingValue: null,
},
dimensionsSpec: {
dimensions: [
{
type: 'string',
name: 'agent_type',
multiValueHandling: 'SORTED_ARRAY',
createBitmapIndex: true,
},
],
dimensionExclusions: ['__time'],
includeAllDimensions: false,
useSchemaDiscovery: false,
},
metricsSpec: [],
granularitySpec: {
type: 'arbitrary',
queryGranularity: {
type: 'none',
},
rollup: false,
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
transformSpec: {
filter: null,
transforms: [],
},
},
columnMappings: [
{
queryColumn: 'v0',
outputColumn: '__time',
},
{
queryColumn: 'agent_type',
outputColumn: 'agent_type',
},
],
tuningConfig: {
maxNumWorkers: 1,
maxRowsInMemory: 100000,
rowsPerSegment: 3000000,
},
},
signature: [],
maxWorkerCount: 1,
},
phase: 'FINISHED',
workerCount: 1,
partitionCount: 1,
startTime: '2023-06-19T05:39:47.166Z',
duration: 2381,
},
],
counters: {
'0': {
'0': {
'0': {
input0: {
type: 'channel',
rows: [465346],
bytes: [360464067],
files: [1],
totalFiles: [1],
input0: {
type: 'channel',
rows: [465346],
bytes: [360464067],
files: [1],
totalFiles: [1],
},
output: {
type: 'channel',
rows: [465346],
bytes: [25430674],
frames: [4],
},
shuffle: {
type: 'channel',
rows: [465346],
bytes: [23570446],
frames: [38],
},
sortProgress: {
type: 'sortProgress',
totalMergingLevels: 3,
levelToTotalBatches: {
'0': 1,
'1': 1,
'2': 1,
},
output: { type: 'channel', rows: [465346], bytes: [25430674], frames: [4] },
shuffle: { type: 'channel', rows: [465346], bytes: [23570446], frames: [38] },
sortProgress: {
type: 'sortProgress',
totalMergingLevels: 3,
levelToTotalBatches: { '0': 1, '1': 1, '2': 1 },
levelToMergedBatches: { '0': 1, '1': 1, '2': 1 },
totalMergersForUltimateLevel: 1,
progressDigest: 1.0,
levelToMergedBatches: {
'0': 1,
'1': 1,
'2': 1,
},
totalMergersForUltimateLevel: 1,
progressDigest: 1.0,
},
},
'1': {
'0': {
input0: { type: 'channel', rows: [465346], bytes: [23570446], frames: [38] },
segmentGenerationProgress: {
type: 'segmentGenerationProgress',
rowsProcessed: 465346,
rowsPersisted: 465346,
rowsMerged: 465346,
rowsPushed: 465346,
},
},
'1': {
'0': {
input0: {
type: 'channel',
rows: [465346],
bytes: [23570446],
frames: [38],
},
segmentGenerationProgress: {
type: 'segmentGenerationProgress',
rowsProcessed: 465346,
rowsPersisted: 465346,
rowsMerged: 465346,
rowsPushed: 465346,
},
},
},
},
},
},
);
}).updateWithTaskPayload({
task: 'query-5aa683e2-a6ee-4655-a834-a643e91055b1',
payload: {
type: 'query_controller',
id: 'query-5aa683e2-a6ee-4655-a834-a643e91055b1',
spec: {
query: {
queryType: 'scan',
dataSource: {
type: 'external',
inputSource: {
type: 'http',
uris: ['https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz'],
},
inputFormat: {
type: 'json',
keepNullColumns: false,
assumeNewlineDelimited: false,
useJsonNodeReader: false,
},
signature: [
{
name: 'timestamp',
type: 'STRING',
},
{
name: 'agent_type',
type: 'STRING',
},
],
},
intervals: {
type: 'intervals',
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
virtualColumns: [
{
type: 'expression',
name: 'v0',
expression: 'timestamp_parse("timestamp",null,\'UTC\')',
outputType: 'LONG',
},
],
resultFormat: 'compactedList',
columns: ['agent_type', 'v0'],
legacy: false,
context: {
__user: 'allowAll',
finalize: false,
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
maxNumTasks: 2,
maxParseExceptions: 0,
queryId: '5aa683e2-a6ee-4655-a834-a643e91055b1',
scanSignature: '[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '{"type":"all"}',
sqlQueryId: '5aa683e2-a6ee-4655-a834-a643e91055b1',
sqlReplaceTimeChunks: 'all',
},
granularity: {
type: 'all',
},
},
columnMappings: [
{
queryColumn: 'v0',
outputColumn: '__time',
},
{
queryColumn: 'agent_type',
outputColumn: 'agent_type',
},
],
destination: {
type: 'dataSource',
dataSource: 'kttm_simple',
segmentGranularity: {
type: 'all',
},
replaceTimeChunks: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
assignmentStrategy: 'max',
tuningConfig: {
maxNumWorkers: 1,
maxRowsInMemory: 100000,
rowsPerSegment: 3000000,
},
},
sqlQuery:
'REPLACE INTO "kttm_simple" OVERWRITE ALL\nSELECT\n TIME_PARSE("timestamp") AS "__time",\n "agent_type"\nFROM TABLE(\n EXTERN(\n \'{"type":"http","uris":["https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz"]}\',\n \'{"type":"json"}\'\n )\n) EXTEND ("timestamp" VARCHAR, "agent_type" VARCHAR)\nPARTITIONED BY ALL TIME',
sqlQueryContext: {
finalizeAggregations: false,
sqlQueryId: '5aa683e2-a6ee-4655-a834-a643e91055b1',
groupByEnableMultiValueUnnesting: false,
sqlInsertSegmentGranularity: '{"type":"all"}',
maxNumTasks: 2,
sqlReplaceTimeChunks: 'all',
queryId: '5aa683e2-a6ee-4655-a834-a643e91055b1',
},
sqlResultsContext: {
timeZone: 'UTC',
serializeComplexValues: true,
stringifyArrays: true,
},
sqlTypeNames: ['TIMESTAMP', 'VARCHAR'],
context: {
forceTimeChunkLock: true,
useLineageBasedSegmentAllocation: true,
},
groupId: 'query-5aa683e2-a6ee-4655-a834-a643e91055b1',
dataSource: 'kttm_simple',
resource: {
availabilityGroup: 'query-5aa683e2-a6ee-4655-a834-a643e91055b1',
requiredCapacity: 1,
},
},
});

View File

@ -41,152 +41,91 @@ PARTITIONED BY DAY
}
*/
export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
{
task: 'query-614dc100-a4b9-40a3-95ce-1227fa7ea765',
export const EXECUTION_INGEST_ERROR = Execution.fromTaskReport({
multiStageQuery: {
type: 'multiStageQuery',
taskId: 'query-8f889312-e989-4b4c-9895-485a1fe796d3',
payload: {
type: 'query_controller',
id: 'query-614dc100-a4b9-40a3-95ce-1227fa7ea765',
spec: {
query: {
queryType: 'scan',
dataSource: {
type: 'external',
inputSource: {
type: 'http',
uris: ['https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json'],
},
inputFormat: {
type: 'json',
keepNullColumns: false,
assumeNewlineDelimited: false,
useJsonNodeReader: false,
},
signature: [
{ name: 'timestamp', type: 'STRING' },
{ name: 'agent_type', type: 'STRING' },
],
status: {
status: 'FAILED',
errorReport: {
taskId: 'query-8f889312-e989-4b4c-9895-485a1fe796d3-worker0_0',
host: 'localhost',
error: {
errorCode: 'TooManyWarnings',
maxWarnings: 2,
rootErrorCode: 'CannotParseExternalData',
errorMessage: 'Too many warnings of type CannotParseExternalData generated (max = 2)',
},
intervals: {
type: 'intervals',
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
virtualColumns: [
{
type: 'expression',
name: 'v0',
expression: 'timestamp_parse("timestamp",null,\'UTC\')',
outputType: 'LONG',
},
],
resultFormat: 'compactedList',
columns: ['agent_type', 'v0'],
legacy: false,
context: {
finalize: false,
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
maxNumTasks: 2,
maxParseExceptions: 2,
queryId: '614dc100-a4b9-40a3-95ce-1227fa7ea765',
scanSignature: '[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '"DAY"',
sqlQueryId: '614dc100-a4b9-40a3-95ce-1227fa7ea765',
sqlReplaceTimeChunks: 'all',
},
granularity: { type: 'all' },
},
columnMappings: [
{ queryColumn: 'v0', outputColumn: '__time' },
{ queryColumn: 'agent_type', outputColumn: 'agent_type' },
],
destination: {
type: 'dataSource',
dataSource: 'kttm-blank-lines',
segmentGranularity: 'DAY',
replaceTimeChunks: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
assignmentStrategy: 'max',
tuningConfig: { maxNumWorkers: 1, maxRowsInMemory: 100000, rowsPerSegment: 3000000 },
},
sqlQuery:
'REPLACE INTO "kttm-blank-lines" OVERWRITE ALL\nSELECT\n TIME_PARSE("timestamp") AS "__time",\n "agent_type"\nFROM TABLE(\n EXTERN(\n \'{"type":"http","uris":["https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json"]}\',\n \'{"type":"json"}\'\n )\n) EXTEND ("timestamp" VARCHAR, "agent_type" VARCHAR)\nPARTITIONED BY DAY',
sqlQueryContext: {
maxParseExceptions: 2,
finalizeAggregations: false,
sqlQueryId: '614dc100-a4b9-40a3-95ce-1227fa7ea765',
groupByEnableMultiValueUnnesting: false,
sqlInsertSegmentGranularity: '"DAY"',
maxNumTasks: 2,
sqlReplaceTimeChunks: 'all',
queryId: '614dc100-a4b9-40a3-95ce-1227fa7ea765',
},
sqlTypeNames: ['TIMESTAMP', 'VARCHAR'],
context: { forceTimeChunkLock: true, useLineageBasedSegmentAllocation: true },
groupId: 'query-614dc100-a4b9-40a3-95ce-1227fa7ea765',
dataSource: 'kttm-blank-lines',
resource: {
availabilityGroup: 'query-614dc100-a4b9-40a3-95ce-1227fa7ea765',
requiredCapacity: 1,
},
},
},
{
multiStageQuery: {
type: 'multiStageQuery',
taskId: 'query-614dc100-a4b9-40a3-95ce-1227fa7ea765',
payload: {
status: {
status: 'FAILED',
errorReport: {
taskId: 'query-614dc100-a4b9-40a3-95ce-1227fa7ea765-worker0_0',
host: 'localhost',
error: {
errorCode: 'TooManyWarnings',
maxWarnings: 2,
rootErrorCode: 'CannotParseExternalData',
errorMessage: 'Too many warnings of type CannotParseExternalData generated (max = 2)',
},
},
warnings: [
{
taskId: 'query-614dc100-a4b9-40a3-95ce-1227fa7ea765-worker0_0',
host: 'localhost:8101',
stageNumber: 0,
error: {
errorCode: 'CannotParseExternalData',
errorMessage:
'Unable to parse row [] (Path: https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json, Record: 3, Line: 3)',
},
exceptionStackTrace:
'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json, Record: 3, Line: 3)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:182)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:248)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:175)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:164)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:140)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:75)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:801)\n\tat 
java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:75)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
},
{
taskId: 'query-614dc100-a4b9-40a3-95ce-1227fa7ea765-worker0_0',
host: 'localhost:8101',
stageNumber: 0,
error: {
errorCode: 'CannotParseExternalData',
errorMessage:
'Unable to parse row [] (Path: https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json, Record: 6, Line: 7)',
},
exceptionStackTrace:
'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json, Record: 6, Line: 7)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:182)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:248)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:175)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:164)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:140)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:75)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:801)\n\tat 
java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:75)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
},
],
startTime: '2023-03-27T22:11:24.945Z',
durationMs: 14106,
pendingTasks: 0,
runningTasks: 2,
},
stages: [
warnings: [
{
taskId: 'query-8f889312-e989-4b4c-9895-485a1fe796d3-worker0_0',
host: 'localhost:8101',
stageNumber: 0,
definition: {
id: '0f627be4-63b6-4249-ba3d-71cd4a78faa2_0',
input: [
{
error: {
errorCode: 'CannotParseExternalData',
errorMessage:
'Unable to parse row [] (Path: https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json, Record: 3, Line: 3)',
},
exceptionStackTrace:
'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json, Record: 3, Line: 3)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:183)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:246)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:173)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:159)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:138)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:75)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:820)\n\tat 
java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:75)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
},
{
taskId: 'query-8f889312-e989-4b4c-9895-485a1fe796d3-worker0_0',
host: 'localhost:8101',
stageNumber: 0,
error: {
errorCode: 'CannotParseExternalData',
errorMessage:
'Unable to parse row [] (Path: https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json, Record: 6, Line: 7)',
},
exceptionStackTrace:
'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json, Record: 6, Line: 7)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:183)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:246)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:173)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:159)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:138)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:75)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:820)\n\tat 
java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:75)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
},
],
startTime: '2023-06-19T05:37:48.605Z',
durationMs: 14760,
pendingTasks: 0,
runningTasks: 2,
},
stages: [
{
stageNumber: 0,
definition: {
id: 'd337a3d8-e361-4795-8eaa-97ced72d9a7b_0',
input: [
{
type: 'external',
inputSource: {
type: 'http',
uris: [
'https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json',
],
},
inputFormat: {
type: 'json',
keepNullColumns: false,
assumeNewlineDelimited: false,
useJsonNodeReader: false,
},
signature: [
{
name: 'timestamp',
type: 'STRING',
},
{
name: 'agent_type',
type: 'STRING',
},
],
},
],
processor: {
type: 'scan',
query: {
queryType: 'scan',
dataSource: {
type: 'external',
inputSource: {
type: 'http',
@ -201,141 +140,312 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
useJsonNodeReader: false,
},
signature: [
{ name: 'timestamp', type: 'STRING' },
{ name: 'agent_type', type: 'STRING' },
],
},
],
processor: {
type: 'scan',
query: {
queryType: 'scan',
dataSource: { type: 'inputNumber', inputNumber: 0 },
intervals: {
type: 'intervals',
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
virtualColumns: [
{
type: 'expression',
name: 'v0',
expression: 'timestamp_parse("timestamp",null,\'UTC\')',
outputType: 'LONG',
name: 'timestamp',
type: 'STRING',
},
{
name: 'agent_type',
type: 'STRING',
},
],
resultFormat: 'compactedList',
columns: ['agent_type', 'v0'],
legacy: false,
context: {
__timeColumn: 'v0',
finalize: false,
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
maxNumTasks: 2,
maxParseExceptions: 2,
queryId: '614dc100-a4b9-40a3-95ce-1227fa7ea765',
scanSignature:
'[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '"DAY"',
sqlQueryId: '614dc100-a4b9-40a3-95ce-1227fa7ea765',
sqlReplaceTimeChunks: 'all',
},
granularity: { type: 'all' },
},
},
signature: [
{ name: '__bucket', type: 'LONG' },
{ name: '__boost', type: 'LONG' },
{ name: 'agent_type', type: 'STRING' },
{ name: 'v0', type: 'LONG' },
],
shuffleSpec: {
type: 'targetSize',
clusterBy: {
columns: [
{ columnName: '__bucket', order: 'ASCENDING' },
{ columnName: '__boost', order: 'ASCENDING' },
],
bucketByCount: 1,
intervals: {
type: 'intervals',
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
targetSize: 3000000,
},
maxWorkerCount: 1,
shuffleCheckHasMultipleValues: true,
maxInputBytesPerWorker: 10737418240,
},
phase: 'FAILED',
workerCount: 1,
startTime: '2023-03-27T22:11:25.310Z',
duration: 13741,
sort: true,
},
{
stageNumber: 1,
definition: {
id: '0f627be4-63b6-4249-ba3d-71cd4a78faa2_1',
input: [{ type: 'stage', stage: 0 }],
processor: {
type: 'segmentGenerator',
dataSchema: {
dataSource: 'kttm-blank-lines',
timestampSpec: { column: '__time', format: 'millis', missingValue: null },
dimensionsSpec: {
dimensions: [
{
type: 'string',
name: 'agent_type',
multiValueHandling: 'SORTED_ARRAY',
createBitmapIndex: true,
},
],
dimensionExclusions: ['__time'],
includeAllDimensions: false,
useSchemaDiscovery: false,
virtualColumns: [
{
type: 'expression',
name: 'v0',
expression: 'timestamp_parse("timestamp",null,\'UTC\')',
outputType: 'LONG',
},
metricsSpec: [],
granularitySpec: {
type: 'arbitrary',
queryGranularity: { type: 'none' },
rollup: false,
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
transformSpec: { filter: null, transforms: [] },
},
columnMappings: [
{ queryColumn: 'v0', outputColumn: '__time' },
{ queryColumn: 'agent_type', outputColumn: 'agent_type' },
],
tuningConfig: {
maxNumWorkers: 1,
maxRowsInMemory: 100000,
rowsPerSegment: 3000000,
resultFormat: 'compactedList',
columns: ['agent_type', 'v0'],
legacy: false,
context: {
__timeColumn: 'v0',
__user: 'allowAll',
finalize: false,
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
maxNumTasks: 2,
maxParseExceptions: 2,
queryId: '8f889312-e989-4b4c-9895-485a1fe796d3',
scanSignature:
'[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '"DAY"',
sqlQueryId: '8f889312-e989-4b4c-9895-485a1fe796d3',
sqlReplaceTimeChunks: 'all',
},
granularity: {
type: 'all',
},
},
signature: [],
maxWorkerCount: 1,
maxInputBytesPerWorker: 10737418240,
},
},
],
counters: {
'0': {
'0': {
input0: { type: 'channel', rows: [10], bytes: [7658], files: [1], totalFiles: [1] },
output: { type: 'channel', rows: [10], bytes: [712], frames: [1] },
sortProgress: {
type: 'sortProgress',
totalMergingLevels: 3,
levelToTotalBatches: { '0': 1, '1': 1, '2': -1 },
levelToMergedBatches: {},
totalMergersForUltimateLevel: -1,
progressDigest: 0.0,
signature: [
{
name: '__bucket',
type: 'LONG',
},
warnings: { type: 'warnings', CannotParseExternalData: 3 },
{
name: '__boost',
type: 'LONG',
},
{
name: 'agent_type',
type: 'STRING',
},
{
name: 'v0',
type: 'LONG',
},
],
shuffleSpec: {
type: 'targetSize',
clusterBy: {
columns: [
{
columnName: '__bucket',
order: 'ASCENDING',
},
{
columnName: '__boost',
order: 'ASCENDING',
},
],
bucketByCount: 1,
},
targetSize: 3000000,
},
maxWorkerCount: 1,
shuffleCheckHasMultipleValues: true,
},
phase: 'FAILED',
workerCount: 1,
startTime: '2023-06-19T05:37:48.952Z',
duration: 14412,
sort: true,
},
{
stageNumber: 1,
definition: {
id: 'd337a3d8-e361-4795-8eaa-97ced72d9a7b_1',
input: [
{
type: 'stage',
stage: 0,
},
],
processor: {
type: 'segmentGenerator',
dataSchema: {
dataSource: 'kttm-blank-lines',
timestampSpec: {
column: '__time',
format: 'millis',
missingValue: null,
},
dimensionsSpec: {
dimensions: [
{
type: 'string',
name: 'agent_type',
multiValueHandling: 'SORTED_ARRAY',
createBitmapIndex: true,
},
],
dimensionExclusions: ['__time'],
includeAllDimensions: false,
useSchemaDiscovery: false,
},
metricsSpec: [],
granularitySpec: {
type: 'arbitrary',
queryGranularity: {
type: 'none',
},
rollup: false,
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
transformSpec: {
filter: null,
transforms: [],
},
},
columnMappings: [
{
queryColumn: 'v0',
outputColumn: '__time',
},
{
queryColumn: 'agent_type',
outputColumn: 'agent_type',
},
],
tuningConfig: {
maxNumWorkers: 1,
maxRowsInMemory: 100000,
rowsPerSegment: 3000000,
},
},
signature: [],
maxWorkerCount: 1,
},
},
],
counters: {
'0': {
'0': {
input0: {
type: 'channel',
rows: [10],
bytes: [7658],
files: [1],
totalFiles: [1],
},
output: {
type: 'channel',
rows: [10],
bytes: [712],
frames: [1],
},
sortProgress: {
type: 'sortProgress',
totalMergingLevels: 3,
levelToTotalBatches: {
'0': 1,
'1': 1,
'2': -1,
},
levelToMergedBatches: {},
totalMergersForUltimateLevel: -1,
progressDigest: 0.0,
},
warnings: {
type: 'warnings',
CannotParseExternalData: 3,
},
},
},
},
},
},
);
}).updateWithTaskPayload({
task: 'query-8f889312-e989-4b4c-9895-485a1fe796d3',
payload: {
type: 'query_controller',
id: 'query-8f889312-e989-4b4c-9895-485a1fe796d3',
spec: {
query: {
queryType: 'scan',
dataSource: {
type: 'external',
inputSource: {
type: 'http',
uris: ['https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json'],
},
inputFormat: {
type: 'json',
keepNullColumns: false,
assumeNewlineDelimited: false,
useJsonNodeReader: false,
},
signature: [
{
name: 'timestamp',
type: 'STRING',
},
{
name: 'agent_type',
type: 'STRING',
},
],
},
intervals: {
type: 'intervals',
intervals: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
virtualColumns: [
{
type: 'expression',
name: 'v0',
expression: 'timestamp_parse("timestamp",null,\'UTC\')',
outputType: 'LONG',
},
],
resultFormat: 'compactedList',
columns: ['agent_type', 'v0'],
legacy: false,
context: {
__user: 'allowAll',
finalize: false,
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
maxNumTasks: 2,
maxParseExceptions: 2,
queryId: '8f889312-e989-4b4c-9895-485a1fe796d3',
scanSignature: '[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '"DAY"',
sqlQueryId: '8f889312-e989-4b4c-9895-485a1fe796d3',
sqlReplaceTimeChunks: 'all',
},
granularity: {
type: 'all',
},
},
columnMappings: [
{
queryColumn: 'v0',
outputColumn: '__time',
},
{
queryColumn: 'agent_type',
outputColumn: 'agent_type',
},
],
destination: {
type: 'dataSource',
dataSource: 'kttm-blank-lines',
segmentGranularity: 'DAY',
replaceTimeChunks: ['-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z'],
},
assignmentStrategy: 'max',
tuningConfig: {
maxNumWorkers: 1,
maxRowsInMemory: 100000,
rowsPerSegment: 3000000,
},
},
sqlQuery:
'REPLACE INTO "kttm-blank-lines" OVERWRITE ALL\nSELECT\n TIME_PARSE("timestamp") AS "__time",\n "agent_type"\nFROM TABLE(\n EXTERN(\n \'{"type":"http","uris":["https://static.imply.io/example-data/kttm-with-issues/kttm-blank-lines.json"]}\',\n \'{"type":"json"}\'\n )\n) EXTEND ("timestamp" VARCHAR, "agent_type" VARCHAR)\nPARTITIONED BY DAY',
sqlQueryContext: {
maxParseExceptions: 2,
finalizeAggregations: false,
sqlQueryId: '8f889312-e989-4b4c-9895-485a1fe796d3',
groupByEnableMultiValueUnnesting: false,
sqlInsertSegmentGranularity: '"DAY"',
maxNumTasks: 2,
sqlReplaceTimeChunks: 'all',
queryId: '8f889312-e989-4b4c-9895-485a1fe796d3',
},
sqlResultsContext: {
timeZone: 'UTC',
serializeComplexValues: true,
stringifyArrays: true,
},
sqlTypeNames: ['TIMESTAMP', 'VARCHAR'],
context: {
forceTimeChunkLock: true,
useLineageBasedSegmentAllocation: true,
},
groupId: 'query-8f889312-e989-4b4c-9895-485a1fe796d3',
dataSource: 'kttm-blank-lines',
resource: {
availabilityGroup: 'query-8f889312-e989-4b4c-9895-485a1fe796d3',
requiredCapacity: 1,
},
},
});

View File

@ -16,20 +16,19 @@
* limitations under the License.
*/
import { FAILED_ASYNC_STATUS, SUCCESS_ASYNC_STATUS } from '../async-query/async-query.mock';
import { Execution } from './execution';
import { EXECUTION_INGEST_COMPLETE } from './execution-ingest-complete.mock';
describe('Execution', () => {
describe('.fromTaskDetail', () => {
describe('.fromTaskReport', () => {
it('fails for bad status (error: null)', () => {
expect(() =>
Execution.fromTaskPayloadAndReport(
{} as any,
{
asyncResultId: 'multi-stage-query-sql-1392d806-c17f-4937-94ee-8fa0a3ce1566',
error: null,
} as any,
),
Execution.fromTaskReport({
asyncResultId: 'multi-stage-query-sql-1392d806-c17f-4937-94ee-8fa0a3ce1566',
error: null,
} as any),
).toThrowError('Invalid payload');
});
@ -43,10 +42,10 @@ describe('Execution', () => {
"useLineageBasedSegmentAllocation": true,
},
"dataSource": "kttm_simple",
"groupId": "query-b55f3432-7810-4529-80ed-780a926a6f03",
"id": "query-b55f3432-7810-4529-80ed-780a926a6f03",
"groupId": "query-5aa683e2-a6ee-4655-a834-a643e91055b1",
"id": "query-5aa683e2-a6ee-4655-a834-a643e91055b1",
"resource": Object {
"availabilityGroup": "query-b55f3432-7810-4529-80ed-780a926a6f03",
"availabilityGroup": "query-5aa683e2-a6ee-4655-a834-a643e91055b1",
"requiredCapacity": 1,
},
"spec": Object {
@ -77,14 +76,16 @@ describe('Execution', () => {
"v0",
],
"context": Object {
"__user": "allowAll",
"finalize": false,
"finalizeAggregations": false,
"groupByEnableMultiValueUnnesting": false,
"maxNumTasks": 2,
"queryId": "b55f3432-7810-4529-80ed-780a926a6f03",
"maxParseExceptions": 0,
"queryId": "5aa683e2-a6ee-4655-a834-a643e91055b1",
"scanSignature": "[{\\"name\\":\\"agent_type\\",\\"type\\":\\"STRING\\"},{\\"name\\":\\"v0\\",\\"type\\":\\"LONG\\"}]",
"sqlInsertSegmentGranularity": "{\\"type\\":\\"all\\"}",
"sqlQueryId": "b55f3432-7810-4529-80ed-780a926a6f03",
"sqlQueryId": "5aa683e2-a6ee-4655-a834-a643e91055b1",
"sqlReplaceTimeChunks": "all",
},
"dataSource": Object {
@ -154,19 +155,23 @@ describe('Execution', () => {
"finalizeAggregations": false,
"groupByEnableMultiValueUnnesting": false,
"maxNumTasks": 2,
"maxParseExceptions": 0,
"queryId": "b55f3432-7810-4529-80ed-780a926a6f03",
"queryId": "5aa683e2-a6ee-4655-a834-a643e91055b1",
"sqlInsertSegmentGranularity": "{\\"type\\":\\"all\\"}",
"sqlQueryId": "b55f3432-7810-4529-80ed-780a926a6f03",
"sqlQueryId": "5aa683e2-a6ee-4655-a834-a643e91055b1",
"sqlReplaceTimeChunks": "all",
},
"sqlResultsContext": Object {
"serializeComplexValues": true,
"stringifyArrays": true,
"timeZone": "UTC",
},
"sqlTypeNames": Array [
"TIMESTAMP",
"VARCHAR",
],
"type": "query_controller",
},
"task": "query-b55f3432-7810-4529-80ed-780a926a6f03",
"task": "query-5aa683e2-a6ee-4655-a834-a643e91055b1",
},
"capacityInfo": undefined,
"destination": Object {
@ -179,24 +184,26 @@ describe('Execution', () => {
},
"type": "dataSource",
},
"duration": 28854,
"duration": 23170,
"engine": "sql-msq-task",
"error": undefined,
"id": "query-b55f3432-7810-4529-80ed-780a926a6f03",
"id": "query-5aa683e2-a6ee-4655-a834-a643e91055b1",
"nativeQuery": Object {
"columns": Array [
"agent_type",
"v0",
],
"context": Object {
"__user": "allowAll",
"finalize": false,
"finalizeAggregations": false,
"groupByEnableMultiValueUnnesting": false,
"maxNumTasks": 2,
"queryId": "b55f3432-7810-4529-80ed-780a926a6f03",
"maxParseExceptions": 0,
"queryId": "5aa683e2-a6ee-4655-a834-a643e91055b1",
"scanSignature": "[{\\"name\\":\\"agent_type\\",\\"type\\":\\"STRING\\"},{\\"name\\":\\"v0\\",\\"type\\":\\"LONG\\"}]",
"sqlInsertSegmentGranularity": "{\\"type\\":\\"all\\"}",
"sqlQueryId": "b55f3432-7810-4529-80ed-780a926a6f03",
"sqlQueryId": "5aa683e2-a6ee-4655-a834-a643e91055b1",
"sqlReplaceTimeChunks": "all",
},
"dataSource": Object {
@ -249,7 +256,6 @@ describe('Execution', () => {
"finalizeAggregations": false,
"groupByEnableMultiValueUnnesting": false,
"maxNumTasks": 2,
"maxParseExceptions": 0,
},
"result": undefined,
"sqlQuery": "REPLACE INTO \\"kttm_simple\\" OVERWRITE ALL
@ -351,7 +357,7 @@ describe('Execution', () => {
"stages": Array [
Object {
"definition": Object {
"id": "8984a4c0-89a0-4a0a-9eaa-bf03088da3e3_0",
"id": "8af42220-2724-4a76-b39f-c2f98df2de69_0",
"input": Array [
Object {
"inputFormat": Object {
@ -379,7 +385,6 @@ describe('Execution', () => {
"type": "external",
},
],
"maxInputBytesPerWorker": 10737418240,
"maxWorkerCount": 1,
"processor": Object {
"query": Object {
@ -389,19 +394,42 @@ describe('Execution', () => {
],
"context": Object {
"__timeColumn": "v0",
"__user": "allowAll",
"finalize": false,
"finalizeAggregations": false,
"groupByEnableMultiValueUnnesting": false,
"maxNumTasks": 2,
"queryId": "b55f3432-7810-4529-80ed-780a926a6f03",
"maxParseExceptions": 0,
"queryId": "5aa683e2-a6ee-4655-a834-a643e91055b1",
"scanSignature": "[{\\"name\\":\\"agent_type\\",\\"type\\":\\"STRING\\"},{\\"name\\":\\"v0\\",\\"type\\":\\"LONG\\"}]",
"sqlInsertSegmentGranularity": "{\\"type\\":\\"all\\"}",
"sqlQueryId": "b55f3432-7810-4529-80ed-780a926a6f03",
"sqlQueryId": "5aa683e2-a6ee-4655-a834-a643e91055b1",
"sqlReplaceTimeChunks": "all",
},
"dataSource": Object {
"inputNumber": 0,
"type": "inputNumber",
"inputFormat": Object {
"assumeNewlineDelimited": false,
"keepNullColumns": false,
"type": "json",
"useJsonNodeReader": false,
},
"inputSource": Object {
"type": "http",
"uris": Array [
"https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz",
],
},
"signature": Array [
Object {
"name": "timestamp",
"type": "STRING",
},
Object {
"name": "agent_type",
"type": "STRING",
},
],
"type": "external",
},
"granularity": Object {
"type": "all",
@ -454,24 +482,23 @@ describe('Execution', () => {
},
],
},
"duration": 24236,
"duration": 20483,
"partitionCount": 1,
"phase": "FINISHED",
"sort": true,
"stageNumber": 0,
"startTime": "2023-03-27T22:17:02.792Z",
"startTime": "2023-06-19T05:39:26.711Z",
"workerCount": 1,
},
Object {
"definition": Object {
"id": "8984a4c0-89a0-4a0a-9eaa-bf03088da3e3_1",
"id": "8af42220-2724-4a76-b39f-c2f98df2de69_1",
"input": Array [
Object {
"stage": 0,
"type": "stage",
},
],
"maxInputBytesPerWorker": 10737418240,
"maxWorkerCount": 1,
"processor": Object {
"columnMappings": Array [
@ -531,16 +558,16 @@ describe('Execution', () => {
},
"signature": Array [],
},
"duration": 4276,
"duration": 2381,
"partitionCount": 1,
"phase": "FINISHED",
"stageNumber": 1,
"startTime": "2023-03-27T22:17:26.978Z",
"startTime": "2023-06-19T05:39:47.166Z",
"workerCount": 1,
},
],
},
"startTime": 2023-03-27T22:17:02.401Z,
"startTime": 2023-06-19T05:39:26.377Z,
"status": "SUCCESS",
"usageInfo": Object {
"pendingTasks": 0,
@ -551,4 +578,95 @@ describe('Execution', () => {
`);
});
});
describe('.fromAsyncStatus', () => {
it('works on SUCCESS', () => {
expect(Execution.fromAsyncStatus(SUCCESS_ASYNC_STATUS)).toMatchInlineSnapshot(`
Execution {
"_payload": undefined,
"capacityInfo": undefined,
"destination": Object {
"type": "taskReport",
},
"duration": 29168,
"engine": "sql-msq-task",
"error": undefined,
"id": "query-ad84d20a-c331-4ee9-ac59-83024e369cf1",
"nativeQuery": undefined,
"queryContext": undefined,
"result": _QueryResult {
"header": Array [
Column {
"name": "channel",
"nativeType": "STRING",
"sqlType": "VARCHAR",
},
Column {
"name": "Count",
"nativeType": "LONG",
"sqlType": "BIGINT",
},
],
"query": undefined,
"queryDuration": undefined,
"queryId": undefined,
"resultContext": undefined,
"rows": Array [
Array [
"#en.wikipedia",
6650,
],
Array [
"#sh.wikipedia",
3969,
],
],
"sqlQuery": undefined,
"sqlQueryId": undefined,
},
"sqlQuery": undefined,
"stages": undefined,
"startTime": 2023-07-05T21:33:19.147Z,
"status": "SUCCESS",
"usageInfo": undefined,
"warnings": undefined,
}
`);
});
it('works on FAILED', () => {
expect(Execution.fromAsyncStatus(FAILED_ASYNC_STATUS)).toMatchInlineSnapshot(`
Execution {
"_payload": undefined,
"capacityInfo": undefined,
"destination": undefined,
"duration": 11217,
"engine": "sql-msq-task",
"error": Object {
"error": Object {
"category": "UNCATEGORIZED",
"context": Object {
"message": "java.io.UncheckedIOException: /",
},
"error": "druidException",
"errorCode": "UnknownError",
"errorMessage": "java.io.UncheckedIOException: /",
"persona": "USER",
},
"taskId": "query-36ea273a-bd6d-48de-b890-2d853d879bf8",
},
"id": "query-36ea273a-bd6d-48de-b890-2d853d879bf8",
"nativeQuery": undefined,
"queryContext": undefined,
"result": undefined,
"sqlQuery": undefined,
"stages": undefined,
"startTime": 2023-07-05T21:40:39.986Z,
"status": "FAILED",
"usageInfo": undefined,
"warnings": undefined,
}
`);
});
});
});

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import { Column, QueryResult, SqlExpression, SqlQuery, SqlWithQuery } from 'druid-query-toolkit';
import { Column, QueryResult, SqlExpression, SqlQuery, SqlWithQuery } from '@druid-toolkit/query';
import {
deepGet,
@ -26,10 +26,17 @@ import {
oneOf,
pluralIfNeeded,
} from '../../utils';
import type { AsyncState, AsyncStatusResponse } from '../async-query/async-query';
import type { DruidEngine } from '../druid-engine/druid-engine';
import { validDruidEngine } from '../druid-engine/druid-engine';
import type { QueryContext } from '../query-context/query-context';
import { Stages } from '../stages/stages';
import type {
MsqTaskPayloadResponse,
MsqTaskReportResponse,
TaskStatus,
TaskStatusResponse,
} from '../task/task';
const IGNORE_CONTEXT_KEYS = [
'__asyncIdentity__',
@ -67,7 +74,7 @@ type ExecutionDestination =
| {
type: 'taskReport';
}
| { type: 'dataSource'; dataSource: string; loaded?: boolean }
| { type: 'dataSource'; dataSource: string; numRows?: number; loaded?: boolean }
| { type: 'download' };
export type ExecutionStatus = 'RUNNING' | 'FAILED' | 'SUCCESS';
@ -171,31 +178,23 @@ export interface ExecutionValue {
error?: ExecutionError;
warnings?: ExecutionError[];
capacityInfo?: CapacityInfo;
_payload?: { payload: any; task: string };
_payload?: MsqTaskPayloadResponse;
}
export class Execution {
static validAsyncStatus(
status: string | undefined,
): status is 'INITIALIZED' | 'RUNNING' | 'COMPLETE' | 'FAILED' | 'UNDETERMINED' {
return oneOf(status, 'INITIALIZED', 'RUNNING', 'COMPLETE', 'FAILED', 'UNDETERMINED');
static INLINE_DATASOURCE_MARKER = '__query_select';
static validAsyncState(status: string | undefined): status is AsyncState {
return oneOf(status, 'ACCEPTED', 'RUNNING', 'FINISHED', 'FAILED');
}
static validTaskStatus(
status: string | undefined,
): status is 'WAITING' | 'PENDING' | 'RUNNING' | 'FAILED' | 'SUCCESS' {
static validTaskStatus(status: string | undefined): status is TaskStatus {
return oneOf(status, 'WAITING', 'PENDING', 'RUNNING', 'FAILED', 'SUCCESS');
}
static normalizeAsyncStatus(
state: 'INITIALIZED' | 'RUNNING' | 'COMPLETE' | 'FAILED' | 'UNDETERMINED',
): ExecutionStatus {
static normalizeAsyncState(state: AsyncState): ExecutionStatus {
switch (state) {
case 'COMPLETE':
return 'SUCCESS';
case 'INITIALIZED':
case 'UNDETERMINED':
case 'ACCEPTED':
return 'RUNNING';
default:
@ -204,9 +203,7 @@ export class Execution {
}
// Treat WAITING as PENDING since they are all the same as far as the UI is concerned
static normalizeTaskStatus(
status: 'WAITING' | 'PENDING' | 'RUNNING' | 'FAILED' | 'SUCCESS',
): ExecutionStatus {
static normalizeTaskStatus(status: TaskStatus): ExecutionStatus {
switch (status) {
case 'SUCCESS':
case 'FAILED':
@ -249,8 +246,58 @@ export class Execution {
});
}
static fromAsyncStatus(
asyncSubmitResult: AsyncStatusResponse,
sqlQuery?: string,
queryContext?: QueryContext,
): Execution {
const { queryId, schema, result, errorDetails } = asyncSubmitResult;
let queryResult: QueryResult | undefined;
if (schema && result?.sampleRecords) {
queryResult = new QueryResult({
header: schema.map(
s => new Column({ name: s.name, sqlType: s.type, nativeType: s.nativeType }),
),
rows: result.sampleRecords,
}).inflateDatesFromSqlTypes();
}
let executionError: ExecutionError | undefined;
if (errorDetails) {
executionError = {
taskId: queryId,
error: errorDetails as any,
};
}
return new Execution({
engine: 'sql-msq-task',
id: queryId,
startTime: new Date(asyncSubmitResult.createdAt),
duration: asyncSubmitResult.durationMs,
status: Execution.normalizeAsyncState(asyncSubmitResult.state),
sqlQuery,
queryContext,
error: executionError,
destination:
typeof result?.dataSource === 'string'
? result.dataSource !== Execution.INLINE_DATASOURCE_MARKER
? {
type: 'dataSource',
dataSource: result.dataSource,
numRows: result.numTotalRows,
}
: {
type: 'taskReport',
}
: undefined,
result: queryResult,
});
}
static fromTaskStatus(
taskStatus: { status: any; task: string },
taskStatus: TaskStatusResponse,
sqlQuery?: string,
queryContext?: QueryContext,
): Execution {
@ -282,13 +329,7 @@ export class Execution {
});
}
static fromTaskPayloadAndReport(
taskPayload: { payload: any; task: string },
taskReport: {
multiStageQuery: { type: string; payload: any; taskId: string };
error?: any;
},
): Execution {
static fromTaskReport(taskReport: MsqTaskReportResponse): Execution {
// Must have status set for a valid report
const id = deepGet(taskReport, 'multiStageQuery.taskId');
const status = deepGet(taskReport, 'multiStageQuery.payload.status.status');
@ -328,7 +369,7 @@ export class Execution {
}).inflateDatesFromSqlTypes();
}
let res = new Execution({
return new Execution({
engine: 'sql-msq-task',
id,
status: Execution.normalizeTaskStatus(status),
@ -342,21 +383,8 @@ export class Execution {
: undefined,
error,
warnings: Array.isArray(warnings) ? warnings : undefined,
destination: deepGet(taskPayload, 'payload.spec.destination'),
result,
nativeQuery: deepGet(taskPayload, 'payload.spec.query'),
_payload: taskPayload,
});
if (deepGet(taskPayload, 'payload.sqlQuery')) {
res = res.changeSqlQuery(
deepGet(taskPayload, 'payload.sqlQuery'),
deleteKeys(deepGet(taskPayload, 'payload.sqlQueryContext'), IGNORE_CONTEXT_KEYS),
);
}
return res;
}
static fromResult(engine: DruidEngine, result: QueryResult): Execution {
@ -480,16 +508,26 @@ export class Execution {
});
}
public updateWith(newSummary: Execution): Execution {
let nextSummary = newSummary;
if (this.sqlQuery && !nextSummary.sqlQuery) {
nextSummary = nextSummary.changeSqlQuery(this.sqlQuery, this.queryContext);
}
if (this.destination && !nextSummary.destination) {
nextSummary = nextSummary.changeDestination(this.destination);
public updateWithTaskPayload(taskPayload: MsqTaskPayloadResponse): Execution {
const value = this.valueOf();
value._payload = taskPayload;
value.destination = {
...value.destination,
...(deepGet(taskPayload, 'payload.spec.destination') || {}),
};
value.nativeQuery = deepGet(taskPayload, 'payload.spec.query');
let ret = new Execution(value);
if (deepGet(taskPayload, 'payload.sqlQuery')) {
ret = ret.changeSqlQuery(
deepGet(taskPayload, 'payload.sqlQuery'),
deleteKeys(deepGet(taskPayload, 'payload.sqlQueryContext'), IGNORE_CONTEXT_KEYS),
);
}
return nextSummary;
return ret;
}
public attachErrorFromStatus(status: any): Execution {
@ -550,6 +588,22 @@ export class Execution {
return destination.dataSource;
}
public getIngestNumRows(): number | undefined {
const { destination, stages } = this;
if (destination?.type === 'dataSource' && typeof destination.numRows === 'number') {
return destination.numRows;
}
const lastStage = stages?.getLastStage();
if (stages && lastStage && lastStage.definition.processor.type === 'segmentGenerator') {
// Assume input0 since we know the segmentGenerator will only ever have one stage input
return stages.getTotalCounterForStage(lastStage, 'input0', 'rows');
}
return;
}
public isSuccessfulInsert(): boolean {
return Boolean(
this.isFullyComplete() && this.getIngestDatasource() && this.status === 'SUCCESS',

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import type { SqlQuery } from 'druid-query-toolkit';
import type { SqlQuery } from '@druid-toolkit/query';
import {
C,
F,
@ -28,7 +28,7 @@ import {
SqlLiteral,
SqlStar,
SqlType,
} from 'druid-query-toolkit';
} from '@druid-toolkit/query';
import * as JSONBig from 'json-bigint-native';
import { nonEmptyArray } from '../../utils';

View File

@ -21,7 +21,7 @@ import React from 'react';
import type { Field } from '../../components';
import { ExternalLink } from '../../components';
import { getLink } from '../../links';
import { deepGet, EMPTY_ARRAY, oneOf, typeIs } from '../../utils';
import { deepGet, EMPTY_ARRAY, oneOf, typeIsKnown } from '../../utils';
import type { IngestionSpec } from '../ingestion-spec/ingestion-spec';
export interface DruidFilter {
@ -90,32 +90,32 @@ export const FILTER_FIELDS: Field<DruidFilter>[] = [
{
name: 'dimension',
type: 'string',
defined: typeIs('selector', 'in', 'interval', 'regex', 'like'),
defined: typeIsKnown(KNOWN_FILTER_TYPES, 'selector', 'in', 'interval', 'regex', 'like'),
required: true,
},
{
name: 'value',
type: 'string',
defined: typeIs('selector'),
defined: typeIsKnown(KNOWN_FILTER_TYPES, 'selector'),
required: true,
},
{
name: 'values',
type: 'string-array',
defined: typeIs('in'),
defined: typeIsKnown(KNOWN_FILTER_TYPES, 'in'),
required: true,
},
{
name: 'intervals',
type: 'string-array',
defined: typeIs('interval'),
defined: typeIsKnown(KNOWN_FILTER_TYPES, 'interval'),
required: true,
placeholder: 'ex: 2020-01-01/2020-06-01',
},
{
name: 'pattern',
type: 'string',
defined: typeIs('regex', 'like'),
defined: typeIsKnown(KNOWN_FILTER_TYPES, 'regex', 'like'),
required: true,
},
@ -124,14 +124,14 @@ export const FILTER_FIELDS: Field<DruidFilter>[] = [
label: 'Sub-filter type',
type: 'string',
suggestions: ['selector', 'in', 'interval', 'regex', 'like'],
defined: typeIs('not'),
defined: typeIsKnown(KNOWN_FILTER_TYPES, 'not'),
required: true,
},
{
name: 'field.dimension',
label: 'Sub-filter dimension',
type: 'string',
defined: typeIs('not'),
defined: typeIsKnown(KNOWN_FILTER_TYPES, 'not'),
},
{
name: 'field.value',

View File

@ -21,7 +21,7 @@ import React from 'react';
import type { Field } from '../../components';
import { ExternalLink } from '../../components';
import { getLink } from '../../links';
import { typeIs } from '../../utils';
import { typeIsKnown } from '../../utils';
export interface FlattenSpec {
useFieldDiscovery?: boolean;
@ -34,6 +34,7 @@ export interface FlattenField {
expr: string;
}
const KNOWN_TYPES = ['path', 'jq', 'root'];
export const FLATTEN_FIELD_FIELDS: Field<FlattenField>[] = [
{
name: 'name',
@ -44,14 +45,14 @@ export const FLATTEN_FIELD_FIELDS: Field<FlattenField>[] = [
{
name: 'type',
type: 'string',
suggestions: ['path', 'jq', 'root'],
suggestions: KNOWN_TYPES,
required: true,
},
{
name: 'expr',
type: 'string',
placeholder: '$.thing',
defined: typeIs('path', 'jq'),
defined: typeIsKnown(KNOWN_TYPES, 'path', 'jq'),
required: true,
info: (
<>

View File

@ -16,6 +16,7 @@
* limitations under the License.
*/
export * from './async-query/async-query';
export * from './compaction-config/compaction-config';
export * from './compaction-status/compaction-status';
export * from './coordinator-dynamic-config/coordinator-dynamic-config';
@ -35,6 +36,7 @@ export * from './metric-spec/metric-spec';
export * from './overlord-dynamic-config/overlord-dynamic-config';
export * from './query-context/query-context';
export * from './stages/stages';
export * from './task/task';
export * from './time/time';
export * from './timestamp-spec/timestamp-spec';
export * from './transform-spec/transform-spec';

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import { sane, SqlQuery } from 'druid-query-toolkit';
import { sane, SqlQuery } from '@druid-toolkit/query';
import { fitIngestQueryPattern, ingestQueryPatternToQuery } from './ingest-query-pattern';

View File

@ -25,7 +25,7 @@ import {
SqlReplaceClause,
SqlWithPart,
T,
} from 'druid-query-toolkit';
} from '@druid-toolkit/query';
import { filterMap, oneOf } from '../../utils';
import type { ExternalConfig } from '../external-config/external-config';

View File

@ -18,6 +18,7 @@
import { Code } from '@blueprintjs/core';
import { range } from 'd3-array';
import type { JSX } from 'react';
import React from 'react';
import type { Field } from '../../components';
@ -38,7 +39,7 @@ import {
isSimpleArray,
oneOf,
parseCsvLine,
typeIs,
typeIsKnown,
} from '../../utils';
import type { SampleResponse } from '../../utils/sampler';
import type { DimensionsSpec } from '../dimension-spec/dimension-spec';
@ -82,6 +83,7 @@ export function isEmptyIngestionSpec(spec: Partial<IngestionSpec>) {
}
export type IngestionType = 'kafka' | 'kinesis' | 'index_parallel';
const KNOWN_TYPES = ['kafka', 'kinesis', 'index_parallel'];
// A combination of IngestionType and inputSourceType
export type IngestionComboType =
@ -921,7 +923,7 @@ export function getIoConfigFormFields(ingestionComboType: IngestionComboType): F
name: 'topic',
type: 'string',
required: true,
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
placeholder: 'topic_name',
},
{
@ -1102,7 +1104,7 @@ export function getIoConfigTuningFormFields(
{
name: 'useEarliestOffset',
type: 'boolean',
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
required: true,
info: (
<>
@ -1118,7 +1120,7 @@ export function getIoConfigTuningFormFields(
{
name: 'useEarliestSequenceNumber',
type: 'boolean',
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
required: true,
info: (
<>
@ -1189,14 +1191,14 @@ export function getIoConfigTuningFormFields(
name: 'recordsPerFetch',
type: 'number',
defaultValue: 4000,
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
info: <>The number of records to request per GetRecords call to Kinesis.</>,
},
{
name: 'pollTimeout',
type: 'number',
defaultValue: 100,
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
info: (
<>
<p>
@ -1209,14 +1211,14 @@ export function getIoConfigTuningFormFields(
name: 'fetchDelayMillis',
type: 'number',
defaultValue: 0,
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
info: <>Time in milliseconds to wait between subsequent GetRecords calls to Kinesis.</>,
},
{
name: 'deaggregate',
type: 'boolean',
defaultValue: false,
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
info: <>Whether to use the de-aggregate function of the KCL.</>,
},
{
@ -1284,7 +1286,7 @@ export function getIoConfigTuningFormFields(
name: 'skipOffsetGaps',
type: 'boolean',
defaultValue: false,
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
info: (
<>
<p>
@ -1722,7 +1724,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
type: 'number',
defaultValue: 1,
min: 1,
defined: typeIs('index_parallel'),
defined: typeIsKnown(KNOWN_TYPES, 'index_parallel'),
info: (
<>
Maximum number of tasks which can be run at the same time. The supervisor task would spawn
@ -1737,7 +1739,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.maxRetry',
type: 'number',
defaultValue: 3,
defined: typeIs('index_parallel'),
defined: typeIsKnown(KNOWN_TYPES, 'index_parallel'),
hideInMore: true,
info: <>Maximum number of retries on task failures.</>,
},
@ -1745,7 +1747,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.taskStatusCheckPeriodMs',
type: 'number',
defaultValue: 1000,
defined: typeIs('index_parallel'),
defined: typeIsKnown(KNOWN_TYPES, 'index_parallel'),
hideInMore: true,
info: <>Polling period in milliseconds to check running task statuses.</>,
},
@ -1806,7 +1808,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.resetOffsetAutomatically',
type: 'boolean',
defaultValue: false,
defined: typeIs('kafka', 'kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka', 'kinesis'),
info: (
<>
Whether to reset the consumer offset if the next offset that it is trying to fetch is less
@ -1818,7 +1820,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.skipSequenceNumberAvailabilityCheck',
type: 'boolean',
defaultValue: false,
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
info: (
<>
Whether to enable checking if the current sequence number is still available in a particular
@ -1831,14 +1833,14 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.intermediatePersistPeriod',
type: 'duration',
defaultValue: 'PT10M',
defined: typeIs('kafka', 'kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka', 'kinesis'),
info: <>The period that determines the rate at which intermediate persists occur.</>,
},
{
name: 'spec.tuningConfig.intermediateHandoffPeriod',
type: 'duration',
defaultValue: 'P2147483647D',
defined: typeIs('kafka', 'kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka', 'kinesis'),
info: (
<>
How often the tasks should hand off segments. Handoff will happen either if
@ -1875,7 +1877,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.handoffConditionTimeout',
type: 'number',
defaultValue: 0,
defined: typeIs('kafka', 'kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka', 'kinesis'),
hideInMore: true,
info: <>Milliseconds to wait for segment handoff. 0 means to wait forever.</>,
},
@ -1921,7 +1923,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
defined: s =>
s.type === 'index_parallel' && deepGet(s, 'spec.ioConfig.inputSource.type') !== 'http',
hideInMore: true,
adjustment: s => deepSet(s, 'splitHintSpec.type', 'maxSize'),
adjustment: s => deepSet(s, 'spec.tuningConfig.splitHintSpec.type', 'maxSize'),
info: (
<>
Maximum number of bytes of input files to process in a single subtask. If a single file is
@ -1935,9 +1937,9 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
type: 'number',
defaultValue: 1000,
min: 1,
defined: typeIs('index_parallel'),
defined: typeIsKnown(KNOWN_TYPES, 'index_parallel'),
hideInMore: true,
adjustment: s => deepSet(s, 'splitHintSpec.type', 'maxSize'),
adjustment: s => deepSet(s, 'spec.tuningConfig.splitHintSpec.type', 'maxSize'),
info: (
<>
Maximum number of input files to process in a single subtask. This limit is to avoid task
@ -1953,7 +1955,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.chatHandlerTimeout',
type: 'duration',
defaultValue: 'PT10S',
defined: typeIs('index_parallel'),
defined: typeIsKnown(KNOWN_TYPES, 'index_parallel'),
hideInMore: true,
info: <>Timeout for reporting the pushed segments in worker tasks.</>,
},
@ -1961,7 +1963,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.chatHandlerNumRetries',
type: 'number',
defaultValue: 5,
defined: typeIs('index_parallel'),
defined: typeIsKnown(KNOWN_TYPES, 'index_parallel'),
hideInMore: true,
info: <>Retries for reporting the pushed segments in worker tasks.</>,
},
@ -1969,7 +1971,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.workerThreads',
type: 'number',
placeholder: 'min(10, taskCount)',
defined: typeIs('kafka', 'kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka', 'kinesis'),
info: (
<>The number of threads that will be used by the supervisor for asynchronous operations.</>
),
@ -1978,7 +1980,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.chatThreads',
type: 'number',
placeholder: 'min(10, taskCount * replicas)',
defined: typeIs('kafka', 'kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka', 'kinesis'),
hideInMore: true,
info: <>The number of threads that will be used for communicating with indexing tasks.</>,
},
@ -1986,7 +1988,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.chatRetries',
type: 'number',
defaultValue: 8,
defined: typeIs('kafka', 'kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka', 'kinesis'),
hideInMore: true,
info: (
<>
@ -1999,14 +2001,14 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.httpTimeout',
type: 'duration',
defaultValue: 'PT10S',
defined: typeIs('kafka', 'kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka', 'kinesis'),
info: <>How long to wait for a HTTP response from an indexing task.</>,
},
{
name: 'spec.tuningConfig.shutdownTimeout',
type: 'duration',
defaultValue: 'PT80S',
defined: typeIs('kafka', 'kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka', 'kinesis'),
hideInMore: true,
info: (
<>
@ -2018,7 +2020,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.offsetFetchPeriod',
type: 'duration',
defaultValue: 'PT30S',
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
info: (
<>
How often the supervisor queries Kafka and the indexing tasks to fetch current offsets and
@ -2030,7 +2032,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.recordBufferSize',
type: 'number',
defaultValue: 10000,
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
info: (
<>
Size of the buffer (number of events) used between the Kinesis fetch threads and the main
@ -2042,7 +2044,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.recordBufferOfferTimeout',
type: 'number',
defaultValue: 5000,
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
hideInMore: true,
info: (
<>
@ -2056,7 +2058,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
hideInMore: true,
type: 'number',
defaultValue: 5000,
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
info: (
<>
Length of time in milliseconds to wait for the buffer to drain before attempting to fetch
@ -2068,7 +2070,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.fetchThreads',
type: 'number',
placeholder: 'max(1, {numProcessors} - 1)',
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
hideInMore: true,
info: (
<>
@ -2081,7 +2083,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.maxRecordsPerPoll',
type: 'number',
defaultValue: 100,
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
hideInMore: true,
info: (
<>
@ -2094,7 +2096,7 @@ const TUNING_FORM_FIELDS: Field<IngestionSpec>[] = [
name: 'spec.tuningConfig.repartitionTransitionDuration',
type: 'duration',
defaultValue: 'PT2M',
defined: typeIs('kinesis'),
defined: typeIsKnown(KNOWN_TYPES, 'kinesis'),
hideInMore: true,
info: (
<>

View File

@ -22,7 +22,7 @@ import React from 'react';
import type { Field } from '../../components';
import { AutoForm, ExternalLink } from '../../components';
import { getLink } from '../../links';
import { compact, deepGet, deepSet, oneOf, typeIs } from '../../utils';
import { compact, deepGet, deepSet, oneOf, typeIs, typeIsKnown } from '../../utils';
import type { FlattenSpec } from '../flatten-spec/flatten-spec';
export interface InputFormat {
@ -49,13 +49,25 @@ export interface InputFormat {
readonly valueFormat?: InputFormat;
}
const KNOWN_TYPES = [
'json',
'csv',
'tsv',
'parquet',
'orc',
'avro_ocf',
'avro_stream',
'regex',
'kafka',
'javascript',
];
function generateInputFormatFields(streaming: boolean) {
return compact([
{
name: 'type',
label: 'Input format',
type: 'string',
suggestions: ['json', 'csv', 'tsv', 'parquet', 'orc', 'avro_ocf', 'avro_stream', 'regex'],
suggestions: KNOWN_TYPES,
required: true,
info: (
<>
@ -74,7 +86,7 @@ function generateInputFormatFields(streaming: boolean) {
name: 'featureSpec',
label: 'JSON parser features',
type: 'json',
defined: typeIs('json'),
defined: typeIsKnown(KNOWN_TYPES, 'json'),
info: (
<>
<p>
@ -95,7 +107,7 @@ function generateInputFormatFields(streaming: boolean) {
? {
name: 'assumeNewlineDelimited',
type: 'boolean',
defined: typeIs('json'),
defined: typeIsKnown(KNOWN_TYPES, 'json'),
disabled: inputFormat => Boolean(inputFormat.useJsonNodeReader),
defaultValue: false,
info: (
@ -125,7 +137,7 @@ function generateInputFormatFields(streaming: boolean) {
name: 'useJsonNodeReader',
label: 'Use JSON node reader',
type: 'boolean',
defined: typeIs('json'),
defined: typeIsKnown(KNOWN_TYPES, 'json'),
disabled: inputFormat => Boolean(inputFormat.assumeNewlineDelimited),
defaultValue: false,
info: (
@ -154,26 +166,26 @@ function generateInputFormatFields(streaming: boolean) {
type: 'string',
defaultValue: '\t',
suggestions: ['\t', ';', '|', '#'],
defined: typeIs('tsv'),
defined: typeIsKnown(KNOWN_TYPES, 'tsv'),
info: <>A custom delimiter for data values.</>,
},
{
name: 'pattern',
type: 'string',
defined: typeIs('regex'),
defined: typeIsKnown(KNOWN_TYPES, 'regex'),
required: true,
},
{
name: 'function',
type: 'string',
defined: typeIs('javascript'),
defined: typeIsKnown(KNOWN_TYPES, 'javascript'),
required: true,
},
{
name: 'skipHeaderRows',
type: 'number',
defaultValue: 0,
defined: typeIs('csv', 'tsv'),
defined: typeIsKnown(KNOWN_TYPES, 'csv', 'tsv'),
min: 0,
info: (
<>
@ -184,7 +196,7 @@ function generateInputFormatFields(streaming: boolean) {
{
name: 'findColumnsFromHeader',
type: 'boolean',
defined: typeIs('csv', 'tsv'),
defined: typeIsKnown(KNOWN_TYPES, 'csv', 'tsv'),
required: true,
info: (
<>
@ -214,14 +226,14 @@ function generateInputFormatFields(streaming: boolean) {
type: 'string',
defaultValue: '\x01',
suggestions: ['\x01', '\x00'],
defined: typeIs('csv', 'tsv', 'regex'),
defined: typeIsKnown(KNOWN_TYPES, 'csv', 'tsv', 'regex'),
info: <>A custom delimiter for multi-value dimensions.</>,
},
{
name: 'binaryAsString',
type: 'boolean',
defaultValue: false,
defined: typeIs('parquet', 'orc', 'avro_ocf', 'avro_stream'),
defined: typeIsKnown(KNOWN_TYPES, 'parquet', 'orc', 'avro_ocf', 'avro_stream'),
info: (
<>
Specifies if the binary column which is not logically marked as a string should be treated
@ -240,7 +252,7 @@ export const KAFKA_METADATA_INPUT_FORMAT_FIELDS: Field<InputFormat>[] = [
label: 'Kafka timestamp column name',
type: 'string',
defaultValue: 'kafka.timestamp',
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
info: `Name of the column for the kafka record's timestamp.`,
},
@ -263,7 +275,7 @@ export const KAFKA_METADATA_INPUT_FORMAT_FIELDS: Field<InputFormat>[] = [
'regex',
],
placeholder: `(don't parse Kafka key)`,
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
info: (
<>
<p>The parser used to parse the key of the Kafka message.</p>
@ -476,7 +488,7 @@ export const KAFKA_METADATA_INPUT_FORMAT_FIELDS: Field<InputFormat>[] = [
name: 'headerFormat.type',
label: 'Kafka header format type',
type: 'string',
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
placeholder: `(don't parse Kafka herders)`,
suggestions: [undefined, 'string'],
},

View File

@ -21,7 +21,7 @@ import React from 'react';
import type { Field } from '../../components';
import { ExternalLink } from '../../components';
import { getLink } from '../../links';
import { deepGet, deepSet, nonEmptyArray, typeIs } from '../../utils';
import { deepGet, deepSet, nonEmptyArray, typeIsKnown } from '../../utils';
export const FILTER_SUGGESTIONS: string[] = [
'*',
@ -167,6 +167,7 @@ export function issueWithInputSource(inputSource: InputSource | undefined): stri
}
}
const KNOWN_TYPES = ['inline', 'druid', 'http', 'local', 's3', 'azure', 'google', 'hdfs', 'sql'];
export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
// inline
@ -174,7 +175,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
name: 'data',
label: 'Inline data',
type: 'string',
defined: typeIs('inline'),
defined: typeIsKnown(KNOWN_TYPES, 'inline'),
required: true,
placeholder: 'Paste your data here',
multiline: true,
@ -188,7 +189,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
label: 'URIs',
type: 'string-array',
placeholder: 'https://example.com/path/to/file1.ext, https://example.com/path/to/file2.ext',
defined: typeIs('http'),
defined: typeIsKnown(KNOWN_TYPES, 'http'),
required: true,
info: (
<p>
@ -201,7 +202,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
name: 'httpAuthenticationUsername',
label: 'HTTP auth username',
type: 'string',
defined: typeIs('http'),
defined: typeIsKnown(KNOWN_TYPES, 'http'),
placeholder: '(optional)',
info: <p>Username to use for authentication with specified URIs</p>,
},
@ -209,7 +210,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
name: 'httpAuthenticationPassword',
label: 'HTTP auth password',
type: 'string',
defined: typeIs('http'),
defined: typeIsKnown(KNOWN_TYPES, 'http'),
placeholder: '(optional)',
info: <p>Password to use for authentication with specified URIs</p>,
},
@ -221,7 +222,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
label: 'Base directory',
type: 'string',
placeholder: '/path/to/files/',
defined: typeIs('local'),
defined: typeIsKnown(KNOWN_TYPES, 'local'),
required: true,
info: (
<>
@ -236,7 +237,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
name: 'filter',
label: 'File filter',
type: 'string',
defined: typeIs('local'),
defined: typeIsKnown(KNOWN_TYPES, 'local'),
required: true,
suggestions: FILTER_SUGGESTIONS,
info: (
@ -441,7 +442,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
type: 'string',
suggestions: FILTER_SUGGESTIONS,
placeholder: '*',
defined: typeIs('s3', 'azure', 'google'),
defined: typeIsKnown(KNOWN_TYPES, 's3', 'azure', 'google'),
info: (
<p>
A wildcard filter for files. See{' '}
@ -461,7 +462,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
type: 'string',
suggestions: [undefined, 'environment', 'default'],
placeholder: '(none)',
defined: typeIs('s3'),
defined: typeIsKnown(KNOWN_TYPES, 's3'),
info: (
<>
<p>S3 access key type.</p>
@ -518,7 +519,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
type: 'string',
suggestions: [undefined, 'environment', 'default'],
placeholder: '(none)',
defined: typeIs('s3'),
defined: typeIsKnown(KNOWN_TYPES, 's3'),
info: (
<>
<p>S3 secret key type.</p>
@ -566,7 +567,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
label: 'Paths',
type: 'string',
placeholder: '/path/to/file.ext',
defined: typeIs('hdfs'),
defined: typeIsKnown(KNOWN_TYPES, 'hdfs'),
required: true,
},
@ -576,7 +577,7 @@ export const INPUT_SOURCE_FIELDS: Field<InputSource>[] = [
label: 'Database type',
type: 'string',
suggestions: ['mysql', 'postgresql'],
defined: typeIs('sql'),
defined: typeIsKnown(KNOWN_TYPES, 'sql'),
required: true,
info: (
<>

View File

@ -21,7 +21,7 @@ import React from 'react';
import type { Field } from '../../components';
import { AutoForm } from '../../components';
import { deepGet, deepSet, oneOf, pluralIfNeeded, typeIs } from '../../utils';
import { deepGet, deepSet, oneOf, oneOfKnown, pluralIfNeeded, typeIsKnown } from '../../utils';
export interface ExtractionNamespaceSpec {
readonly type: string;
@ -90,11 +90,14 @@ function issueWithConnectUri(uri: string): string | undefined {
return;
}
const KNOWN_TYPES = ['map', 'cachedNamespace', 'kafka'];
const KNOWN_EXTRACTION_NAMESPACE_TYPES = ['uri', 'jdbc'];
const KNOWN_NAMESPACE_PARSE_SPEC_FORMATS = ['csv', 'tsv', 'simpleJson', 'customJson'];
export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
{
name: 'type',
type: 'string',
suggestions: ['map', 'cachedNamespace', 'kafka'],
suggestions: KNOWN_TYPES,
required: true,
adjustment: l => {
if (l.type === 'map' && !l.map) {
@ -115,7 +118,7 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'map',
type: 'json',
height: '60vh',
defined: typeIs('map'),
defined: typeIsKnown(KNOWN_TYPES, 'map'),
required: true,
issueWithValue: value => {
if (!value) return 'map must be defined';
@ -136,8 +139,8 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
label: 'Extraction type',
type: 'string',
placeholder: 'uri',
suggestions: ['uri', 'jdbc'],
defined: typeIs('cachedNamespace'),
suggestions: KNOWN_EXTRACTION_NAMESPACE_TYPES,
defined: typeIsKnown(KNOWN_TYPES, 'cachedNamespace'),
required: true,
},
@ -147,7 +150,8 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
type: 'string',
placeholder: 's3://bucket/some/key/prefix/',
defined: l =>
deepGet(l, 'extractionNamespace.type') === 'uri' && !deepGet(l, 'extractionNamespace.uri'),
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'uri') &&
!deepGet(l, 'extractionNamespace.uri'),
required: l =>
!deepGet(l, 'extractionNamespace.uriPrefix') && !deepGet(l, 'extractionNamespace.uri'),
issueWithValue: issueWithUri,
@ -165,7 +169,7 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
label: 'URI (deprecated)',
placeholder: 's3://bucket/some/key/prefix/lookups-01.gz',
defined: l =>
deepGet(l, 'extractionNamespace.type') === 'uri' &&
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'uri') &&
!deepGet(l, 'extractionNamespace.uriPrefix'),
required: l =>
!deepGet(l, 'extractionNamespace.uriPrefix') && !deepGet(l, 'extractionNamespace.uri'),
@ -186,7 +190,7 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
type: 'string',
defaultValue: '.*',
defined: l =>
deepGet(l, 'extractionNamespace.type') === 'uri' &&
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'uri') &&
Boolean(deepGet(l, 'extractionNamespace.uriPrefix')),
info: 'Optional regex for matching the file name under uriPrefix.',
},
@ -196,8 +200,9 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'extractionNamespace.namespaceParseSpec.format',
label: 'Parse format',
type: 'string',
suggestions: ['csv', 'tsv', 'simpleJson', 'customJson'],
defined: l => deepGet(l, 'extractionNamespace.type') === 'uri',
suggestions: KNOWN_NAMESPACE_PARSE_SPEC_FORMATS,
defined: l =>
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'uri'),
required: true,
info: (
<>
@ -217,7 +222,12 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
type: 'string',
defaultValue: '\t',
suggestions: ['\t', ';', '|', '#'],
defined: l => deepGet(l, 'extractionNamespace.namespaceParseSpec.format') === 'tsv',
defined: l =>
oneOfKnown(
deepGet(l, 'extractionNamespace.namespaceParseSpec.format'),
KNOWN_NAMESPACE_PARSE_SPEC_FORMATS,
'tsv',
),
},
// CSV + TSV
@ -225,21 +235,39 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'extractionNamespace.namespaceParseSpec.skipHeaderRows',
type: 'number',
defaultValue: 0,
defined: l => oneOf(deepGet(l, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
defined: l =>
oneOfKnown(
deepGet(l, 'extractionNamespace.namespaceParseSpec.format'),
KNOWN_NAMESPACE_PARSE_SPEC_FORMATS,
'csv',
'tsv',
),
info: `Number of header rows to be skipped.`,
},
{
name: 'extractionNamespace.namespaceParseSpec.hasHeaderRow',
type: 'boolean',
defaultValue: false,
defined: l => oneOf(deepGet(l, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
defined: l =>
oneOfKnown(
deepGet(l, 'extractionNamespace.namespaceParseSpec.format'),
KNOWN_NAMESPACE_PARSE_SPEC_FORMATS,
'csv',
'tsv',
),
info: `A flag to indicate that column information can be extracted from the input files' header row`,
},
{
name: 'extractionNamespace.namespaceParseSpec.columns',
type: 'string-array',
placeholder: 'key, value',
defined: l => oneOf(deepGet(l, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
defined: l =>
oneOfKnown(
deepGet(l, 'extractionNamespace.namespaceParseSpec.format'),
KNOWN_NAMESPACE_PARSE_SPEC_FORMATS,
'csv',
'tsv',
),
required: l => !deepGet(l, 'extractionNamespace.namespaceParseSpec.hasHeaderRow'),
info: 'The list of columns in the csv file',
},
@ -247,14 +275,26 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'extractionNamespace.namespaceParseSpec.keyColumn',
type: 'string',
placeholder: '(optional - defaults to the first column)',
defined: l => oneOf(deepGet(l, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
defined: l =>
oneOfKnown(
deepGet(l, 'extractionNamespace.namespaceParseSpec.format'),
KNOWN_NAMESPACE_PARSE_SPEC_FORMATS,
'csv',
'tsv',
),
info: 'The name of the column containing the key',
},
{
name: 'extractionNamespace.namespaceParseSpec.valueColumn',
type: 'string',
placeholder: '(optional - defaults to the second column)',
defined: l => oneOf(deepGet(l, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
defined: l =>
oneOfKnown(
deepGet(l, 'extractionNamespace.namespaceParseSpec.format'),
KNOWN_NAMESPACE_PARSE_SPEC_FORMATS,
'csv',
'tsv',
),
info: 'The name of the column containing the value',
},
@ -263,14 +303,24 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'extractionNamespace.namespaceParseSpec.keyFieldName',
type: 'string',
placeholder: `key`,
defined: l => deepGet(l, 'extractionNamespace.namespaceParseSpec.format') === 'customJson',
defined: l =>
oneOfKnown(
deepGet(l, 'extractionNamespace.namespaceParseSpec.format'),
KNOWN_NAMESPACE_PARSE_SPEC_FORMATS,
'customJson',
),
required: true,
},
{
name: 'extractionNamespace.namespaceParseSpec.valueFieldName',
type: 'string',
placeholder: `value`,
defined: l => deepGet(l, 'extractionNamespace.namespaceParseSpec.format') === 'customJson',
defined: l =>
oneOfKnown(
deepGet(l, 'extractionNamespace.namespaceParseSpec.format'),
KNOWN_NAMESPACE_PARSE_SPEC_FORMATS,
'customJson',
),
required: true,
},
@ -279,7 +329,8 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'extractionNamespace.connectorConfig.connectURI',
label: 'Connect URI',
type: 'string',
defined: l => deepGet(l, 'extractionNamespace.type') === 'jdbc',
defined: l =>
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'jdbc'),
required: true,
issueWithValue: issueWithConnectUri,
info: 'Defines the connectURI for connecting to the database',
@ -287,20 +338,23 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
{
name: 'extractionNamespace.connectorConfig.user',
type: 'string',
defined: l => deepGet(l, 'extractionNamespace.type') === 'jdbc',
defined: l =>
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'jdbc'),
info: 'Defines the user to be used by the connector config',
},
{
name: 'extractionNamespace.connectorConfig.password',
type: 'string',
defined: l => deepGet(l, 'extractionNamespace.type') === 'jdbc',
defined: l =>
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'jdbc'),
info: 'Defines the password to be used by the connector config',
},
{
name: 'extractionNamespace.table',
type: 'string',
placeholder: 'lookup_table',
defined: l => deepGet(l, 'extractionNamespace.type') === 'jdbc',
defined: l =>
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'jdbc'),
required: true,
info: (
<>
@ -318,7 +372,8 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'extractionNamespace.keyColumn',
type: 'string',
placeholder: 'key_column',
defined: l => deepGet(l, 'extractionNamespace.type') === 'jdbc',
defined: l =>
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'jdbc'),
required: true,
info: (
<>
@ -336,7 +391,8 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'extractionNamespace.valueColumn',
type: 'string',
placeholder: 'value_column',
defined: l => deepGet(l, 'extractionNamespace.type') === 'jdbc',
defined: l =>
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'jdbc'),
required: true,
info: (
<>
@ -355,7 +411,8 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
type: 'string',
label: 'Timestamp column',
placeholder: 'timestamp_column (optional)',
defined: l => deepGet(l, 'extractionNamespace.type') === 'jdbc',
defined: l =>
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'jdbc'),
info: (
<>
<p>
@ -372,7 +429,8 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'extractionNamespace.filter',
type: 'string',
placeholder: 'for_lookup = 1 (optional)',
defined: l => deepGet(l, 'extractionNamespace.type') === 'jdbc',
defined: l =>
oneOfKnown(deepGet(l, 'extractionNamespace.type'), KNOWN_EXTRACTION_NAMESPACE_TYPES, 'jdbc'),
info: (
<>
<p>
@ -389,7 +447,13 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
{
name: 'extractionNamespace.pollPeriod',
type: 'duration',
defined: l => oneOf(deepGet(l, 'extractionNamespace.type'), 'uri', 'jdbc'),
defined: l =>
oneOfKnown(
deepGet(l, 'extractionNamespace.type'),
KNOWN_EXTRACTION_NAMESPACE_TYPES,
'uri',
'jdbc',
),
info: `Period between polling for updates`,
required: true,
suggestions: ['PT1M', 'PT10M', 'PT30M', 'PT1H', 'PT6H', 'P1D'],
@ -400,14 +464,14 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'firstCacheTimeout',
type: 'number',
defaultValue: 0,
defined: typeIs('cachedNamespace'),
defined: typeIsKnown(KNOWN_TYPES, 'cachedNamespace'),
info: `How long to wait (in ms) for the first run of the cache to populate. 0 indicates to not wait`,
},
{
name: 'injective',
type: 'boolean',
defaultValue: false,
defined: typeIs('cachedNamespace'),
defined: typeIsKnown(KNOWN_TYPES, 'cachedNamespace'),
info: `If the underlying map is injective (keys and values are unique) then optimizations can occur internally by setting this to true`,
},
@ -415,7 +479,7 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
{
name: 'kafkaTopic',
type: 'string',
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
required: true,
info: `The Kafka topic to read the data from`,
},
@ -423,7 +487,7 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'kafkaProperties',
type: 'json',
height: '100px',
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
required: true,
issueWithValue: value => {
if (!value) return 'kafkaProperties must be defined';
@ -436,14 +500,14 @@ export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
name: 'connectTimeout',
type: 'number',
defaultValue: 0,
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
info: `How long to wait for an initial connection`,
},
{
name: 'isOneToOne',
type: 'boolean',
defaultValue: false,
defined: typeIs('kafka'),
defined: typeIsKnown(KNOWN_TYPES, 'kafka'),
info: `If the underlying map is one-to-one (keys and values are unique) then optimizations can occur internally by setting this to true`,
},
];

View File

@ -22,7 +22,7 @@ import React from 'react';
import type { Field } from '../../components';
import { ExternalLink } from '../../components';
import { getLink } from '../../links';
import { filterMap, typeIs } from '../../utils';
import { filterMap, typeIsKnown } from '../../utils';
import type { SampleResponse } from '../../utils/sampler';
import { guessColumnTypeFromSampleResponse } from '../ingestion-spec/ingestion-spec';
@ -48,6 +48,29 @@ export interface MetricSpec {
readonly k?: number;
}
const KNOWN_TYPES = [
'count',
'longSum',
'doubleSum',
'floatSum',
'longMin',
'doubleMin',
'floatMin',
'longMax',
'doubleMax',
'floatMax',
'stringFirst',
'stringLast',
'thetaSketch',
'arrayOfDoublesSketch',
'HLLSketchBuild',
'HLLSketchMerge',
'quantilesDoublesSketch',
'momentSketch',
'fixedBucketsHistogram',
'hyperUnique',
'filtered',
];
export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'name',
@ -95,7 +118,8 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'fieldName',
type: 'string',
defined: typeIs(
defined: typeIsKnown(
KNOWN_TYPES,
'longSum',
'doubleSum',
'floatSum',
@ -122,32 +146,32 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
name: 'maxStringBytes',
type: 'number',
defaultValue: 1024,
defined: typeIs('stringFirst', 'stringLast'),
defined: typeIsKnown(KNOWN_TYPES, 'stringFirst', 'stringLast'),
},
{
name: 'filterNullValues',
type: 'boolean',
defaultValue: false,
defined: typeIs('stringFirst', 'stringLast'),
defined: typeIsKnown(KNOWN_TYPES, 'stringFirst', 'stringLast'),
},
// filtered
{
name: 'filter',
type: 'json',
defined: typeIs('filtered'),
defined: typeIsKnown(KNOWN_TYPES, 'filtered'),
required: true,
},
{
name: 'aggregator',
type: 'json',
defined: typeIs('filtered'),
defined: typeIsKnown(KNOWN_TYPES, 'filtered'),
required: true,
},
// thetaSketch
{
name: 'size',
type: 'number',
defined: typeIs('thetaSketch'),
defined: typeIsKnown(KNOWN_TYPES, 'thetaSketch'),
defaultValue: 16384,
info: (
<>
@ -171,7 +195,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'isInputThetaSketch',
type: 'boolean',
defined: typeIs('thetaSketch'),
defined: typeIsKnown(KNOWN_TYPES, 'thetaSketch'),
defaultValue: false,
info: (
<>
@ -185,7 +209,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'nominalEntries',
type: 'number',
defined: typeIs('arrayOfDoublesSketch'),
defined: typeIsKnown(KNOWN_TYPES, 'arrayOfDoublesSketch'),
defaultValue: 16384,
info: (
<>
@ -207,7 +231,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'metricColumns',
type: 'string-array',
defined: typeIs('arrayOfDoublesSketch'),
defined: typeIsKnown(KNOWN_TYPES, 'arrayOfDoublesSketch'),
info: (
<>
If building sketches from raw data, an array of names of the input columns containing
@ -218,7 +242,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'numberOfValues',
type: 'number',
defined: typeIs('arrayOfDoublesSketch'),
defined: typeIsKnown(KNOWN_TYPES, 'arrayOfDoublesSketch'),
placeholder: 'metricColumns length or 1',
info: <>Number of values associated with each distinct key.</>,
},
@ -226,7 +250,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'lgK',
type: 'number',
defined: typeIs('HLLSketchBuild', 'HLLSketchMerge'),
defined: typeIsKnown(KNOWN_TYPES, 'HLLSketchBuild', 'HLLSketchMerge'),
defaultValue: 12,
info: (
<>
@ -241,7 +265,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'tgtHllType',
type: 'string',
defined: typeIs('HLLSketchBuild', 'HLLSketchMerge'),
defined: typeIsKnown(KNOWN_TYPES, 'HLLSketchBuild', 'HLLSketchMerge'),
defaultValue: 'HLL_4',
suggestions: ['HLL_4', 'HLL_6', 'HLL_8'],
info: (
@ -255,7 +279,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'k',
type: 'number',
defined: typeIs('quantilesDoublesSketch'),
defined: typeIsKnown(KNOWN_TYPES, 'quantilesDoublesSketch'),
defaultValue: 128,
info: (
<>
@ -277,7 +301,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'k',
type: 'number',
defined: typeIs('momentSketch'),
defined: typeIsKnown(KNOWN_TYPES, 'momentSketch'),
required: true,
info: (
<>
@ -289,7 +313,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'compress',
type: 'boolean',
defined: typeIs('momentSketch'),
defined: typeIsKnown(KNOWN_TYPES, 'momentSketch'),
defaultValue: true,
info: (
<>
@ -303,21 +327,21 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'lowerLimit',
type: 'number',
defined: typeIs('fixedBucketsHistogram'),
defined: typeIsKnown(KNOWN_TYPES, 'fixedBucketsHistogram'),
required: true,
info: <>Lower limit of the histogram.</>,
},
{
name: 'upperLimit',
type: 'number',
defined: typeIs('fixedBucketsHistogram'),
defined: typeIsKnown(KNOWN_TYPES, 'fixedBucketsHistogram'),
required: true,
info: <>Upper limit of the histogram.</>,
},
{
name: 'numBuckets',
type: 'number',
defined: typeIs('fixedBucketsHistogram'),
defined: typeIsKnown(KNOWN_TYPES, 'fixedBucketsHistogram'),
defaultValue: 10,
required: true,
info: (
@ -330,7 +354,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'outlierHandlingMode',
type: 'string',
defined: typeIs('fixedBucketsHistogram'),
defined: typeIsKnown(KNOWN_TYPES, 'fixedBucketsHistogram'),
required: true,
suggestions: ['ignore', 'overflow', 'clip'],
info: (
@ -356,7 +380,7 @@ export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
{
name: 'isInputHyperUnique',
type: 'boolean',
defined: typeIs('hyperUnique'),
defined: typeIsKnown(KNOWN_TYPES, 'hyperUnique'),
defaultValue: false,
info: (
<>

View File

@ -16,6 +16,7 @@
* limitations under the License.
*/
export * from './async-query/async-query.mock';
export * from './execution/execution-ingest-complete.mock';
export * from './execution/execution-ingest-error.mock';
export * from './stages/stages.mock';

View File

@ -62,6 +62,7 @@ export interface StageDefinition {
};
maxWorkerCount: number;
shuffleCheckHasMultipleValues?: boolean;
maxInputBytesPerWorker?: number;
};
phase?: 'NEW' | 'READING_INPUT' | 'POST_READING' | 'RESULTS_READY' | 'FINISHED' | 'FAILED';
workerCount?: number;
@ -74,7 +75,7 @@ export interface StageDefinition {
export interface ClusterBy {
columns: {
columnName: string;
descending?: boolean;
order?: 'ASCENDING' | 'DESCENDING';
}[];
bucketByCount?: number;
}
@ -94,7 +95,9 @@ export function formatClusterBy(
}
}
return columns.map(part => part.columnName + (part.descending ? ' DESC' : '')).join(', ');
return columns
.map(part => part.columnName + (part.order === 'DESCENDING' ? ' DESC' : ''))
.join(', ');
}
export interface StageWorkerCounter {

View File

@ -0,0 +1,102 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import type { StageDefinition } from '../stages/stages';
export type TaskStatus = 'WAITING' | 'PENDING' | 'RUNNING' | 'FAILED' | 'SUCCESS';
export type TaskStatusWithCanceled = TaskStatus | 'CANCELED';
export interface TaskStatusResponse {
task: string;
status: {
status: TaskStatus;
error?: any;
};
}
export interface MsqTaskPayloadResponse {
task: string;
payload: {
type: 'query_controller';
id: string;
spec: {
query: Record<string, any>;
columnMappings: {
queryColumn: string;
outputColumn: string;
}[];
destination:
| {
type: 'taskReport';
}
| {
type: 'dataSource';
dataSource: string;
segmentGranularity: string | { type: string };
replaceTimeChunks: string[];
};
assignmentStrategy: 'max' | 'auto';
tuningConfig: Record<string, any>;
};
sqlQuery: string;
sqlQueryContext: Record<string, any>;
sqlResultsContext: Record<string, any>;
sqlTypeNames: string[];
context: Record<string, any>;
groupId: string;
dataSource: string;
resource: {
availabilityGroup: string;
requiredCapacity: number;
};
};
}
export interface MsqTaskReportResponse {
multiStageQuery: {
type: 'multiStageQuery';
taskId: string;
payload: {
status: {
status: string;
errorReport?: MsqTaskErrorReport;
warnings?: MsqTaskErrorReport[];
startTime: string;
durationMs: number;
pendingTasks: number;
runningTasks: number;
};
stages: StageDefinition[];
counters: Record<string, Record<string, any>>;
};
};
error?: any;
}
export interface MsqTaskErrorReport {
taskId: string;
host: string;
error: {
errorCode: string;
errorMessage: string;
maxWarnings?: number;
rootErrorCode?: string;
};
stageNumber?: number;
exceptionStackTrace?: string;
}

View File

@ -16,8 +16,8 @@
* limitations under the License.
*/
import type { SqlValues, SqlWithQuery } from 'druid-query-toolkit';
import { SqlExpression, SqlQuery, T } from 'druid-query-toolkit';
import type { SqlValues, SqlWithQuery } from '@druid-toolkit/query';
import { SqlExpression, SqlQuery, T } from '@druid-toolkit/query';
import Hjson from 'hjson';
import * as JSONBig from 'json-bigint-native';

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import { sane } from 'druid-query-toolkit';
import { sane } from '@druid-toolkit/query';
import { WorkbenchQuery } from './workbench-query';
import { WorkbenchQueryPart } from './workbench-query-part';
@ -138,7 +138,6 @@ describe('WorkbenchQuery', () => {
`End of input while parsing an object (missing '}') at line 40,2 >>>} ...`,
),
).toEqual({
match: '',
row: 39,
column: 1,
});
@ -421,9 +420,11 @@ describe('WorkbenchQuery', () => {
const apiQuery = workbenchQuery.getApiQuery(makeQueryId);
expect(apiQuery).toEqual({
cancelQueryId: undefined,
engine: 'sql-msq-task',
query: {
context: {
executionMode: 'async',
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
useCache: false,

View File

@ -21,7 +21,7 @@ import type {
SqlExpression,
SqlPartitionedByClause,
SqlQuery,
} from 'druid-query-toolkit';
} from '@druid-toolkit/query';
import {
C,
F,
@ -29,7 +29,7 @@ import {
SqlOrderByClause,
SqlOrderByExpression,
SqlTable,
} from 'druid-query-toolkit';
} from '@druid-toolkit/query';
import Hjson from 'hjson';
import * as JSONBig from 'json-bigint-native';
import { v4 as uuidv4 } from 'uuid';
@ -71,8 +71,6 @@ export interface WorkbenchQueryValue {
}
export class WorkbenchQuery {
static INLINE_DATASOURCE_MARKER = '__query_select';
private static enabledQueryEngines: DruidEngine[] = ['native', 'sql-native'];
static blank(): WorkbenchQuery {
@ -228,7 +226,7 @@ export class WorkbenchQuery {
static getRowColumnFromIssue(issue: string): RowColumn | undefined {
const m = issue.match(/at line (\d+),(\d+)/);
if (!m) return;
return { match: '', row: Number(m[1]) - 1, column: Number(m[2]) - 1 };
return { row: Number(m[1]) - 1, column: Number(m[2]) - 1 };
}
public readonly queryParts: WorkbenchQueryPart[];
@ -622,6 +620,7 @@ export class WorkbenchQuery {
}
if (engine === 'sql-msq-task') {
apiQuery.context.executionMode ??= 'async';
apiQuery.context.finalizeAggregations ??= !ingestQuery;
apiQuery.context.groupByEnableMultiValueUnnesting ??= !ingestQuery;
}

View File

@ -20,7 +20,7 @@ import 'core-js/stable';
import 'regenerator-runtime/runtime';
import './bootstrap/ace';
import { QueryRunner } from 'druid-query-toolkit';
import { QueryRunner } from '@druid-toolkit/query';
import React from 'react';
import { createRoot } from 'react-dom/client';

View File

@ -16,8 +16,8 @@
* limitations under the License.
*/
import type { QueryResult } from '@druid-toolkit/query';
import type { CancelToken } from 'axios';
import type { QueryResult } from 'druid-query-toolkit';
import type { Execution } from '../../druid-models';
import { IntermediateQueryState } from '../../utils';

View File

@ -16,10 +16,10 @@
* limitations under the License.
*/
import { L, QueryResult } from '@druid-toolkit/query';
import type { AxiosResponse, CancelToken } from 'axios';
import { L, QueryResult } from 'druid-query-toolkit';
import type { QueryContext } from '../../druid-models';
import type { AsyncStatusResponse, QueryContext } from '../../druid-models';
import { Execution } from '../../druid-models';
import { Api } from '../../singletons';
import {
@ -31,6 +31,8 @@ import {
} from '../../utils';
import { maybeGetClusterCapacity } from '../capacity';
const USE_TASK_PAYLOAD = true;
const USE_TASK_REPORTS = true;
const WAIT_FOR_SEGMENT_METADATA_TIMEOUT = 180000; // 3 minutes to wait until segments appear in the metadata
const WAIT_FOR_SEGMENT_LOAD_TIMEOUT = 540000; // 9 minutes to wait for segments to load at all
@ -85,27 +87,32 @@ export async function submitTaskQuery(
}
}
let sqlTaskResp: AxiosResponse;
let sqlAsyncResp: AxiosResponse<AsyncStatusResponse>;
try {
sqlTaskResp = await Api.instance.post(`/druid/v2/sql/task`, jsonQuery, { cancelToken });
sqlAsyncResp = await Api.instance.post<AsyncStatusResponse>(
`/druid/v2/sql/statements`,
jsonQuery,
{
cancelToken,
},
);
} catch (e) {
const druidError = deepGet(e, 'response.data.error');
const druidError = deepGet(e, 'response.data');
if (!druidError) throw e;
throw new DruidError(druidError, prefixLines);
}
const sqlTaskPayload = sqlTaskResp.data;
const sqlAsyncStatus = sqlAsyncResp.data;
if (!sqlTaskPayload.taskId) {
if (!Array.isArray(sqlTaskPayload)) throw new Error('unexpected task payload');
if (!sqlAsyncStatus.queryId) {
if (!Array.isArray(sqlAsyncStatus)) throw new Error('unexpected task payload');
return Execution.fromResult(
'sql-msq-task',
QueryResult.fromRawResult(sqlTaskPayload, false, true, true, true),
QueryResult.fromRawResult(sqlAsyncStatus, false, true, true, true),
);
}
let execution = Execution.fromTaskSubmit(sqlTaskPayload, sqlQuery, context);
let execution = Execution.fromAsyncStatus(sqlAsyncStatus, sqlQuery, context);
if (onSubmitted) {
onSubmitted(execution.id);
@ -161,9 +168,7 @@ export async function updateExecutionWithTaskIfNeeded(
if (!execution.isWaitingForQuery()) return execution;
// Inherit old payload so as not to re-query it
return execution.updateWith(
await getTaskExecution(execution.id, execution._payload, cancelToken),
);
return await getTaskExecution(execution.id, execution._payload, cancelToken);
}
export async function getTaskExecution(
@ -173,59 +178,68 @@ export async function getTaskExecution(
): Promise<Execution> {
const encodedId = Api.encodePath(id);
let taskPayloadResp: AxiosResponse | undefined;
if (!taskPayloadOverride) {
let execution: Execution | undefined;
if (USE_TASK_REPORTS) {
let taskReport: any;
try {
taskPayloadResp = await Api.instance.get(`/druid/indexer/v1/task/${encodedId}`, {
taskReport = (
await Api.instance.get(`/druid/indexer/v1/task/${encodedId}/reports`, {
cancelToken,
})
).data;
} catch (e) {
if (Api.isNetworkError(e)) throw e;
}
if (taskReport) {
try {
execution = Execution.fromTaskReport(taskReport);
} catch {
// We got a bad payload, wait a bit and try to get the payload again (also log it)
// This whole catch block is a hack, and we should make the detail route more robust
console.error(
`Got unusable response from the reports endpoint (/druid/indexer/v1/task/${encodedId}/reports) going to retry`,
);
console.log('Report response:', taskReport);
}
}
}
if (!execution) {
const statusResp = await Api.instance.get<AsyncStatusResponse>(
`/druid/v2/sql/statements/${encodedId}`,
{
cancelToken,
});
},
);
execution = Execution.fromAsyncStatus(statusResp.data);
}
let taskPayload: any = taskPayloadOverride;
if (USE_TASK_PAYLOAD && !taskPayload) {
try {
taskPayload = (
await Api.instance.get(`/druid/indexer/v1/task/${encodedId}`, {
cancelToken,
})
).data;
} catch (e) {
if (Api.isNetworkError(e)) throw e;
}
}
let taskReportResp: AxiosResponse | undefined;
try {
taskReportResp = await Api.instance.get(`/druid/indexer/v1/task/${encodedId}/reports`, {
cancelToken,
});
} catch (e) {
if (Api.isNetworkError(e)) throw e;
if (taskPayload) {
execution = execution.updateWithTaskPayload(taskPayload);
}
if ((taskPayloadResp || taskPayloadOverride) && taskReportResp) {
let execution: Execution | undefined;
try {
execution = Execution.fromTaskPayloadAndReport(
taskPayloadResp ? taskPayloadResp.data : taskPayloadOverride,
taskReportResp.data,
);
} catch {
// We got a bad payload, wait a bit and try to get the payload again (also log it)
// This whole catch block is a hack, and we should make the detail route more robust
console.error(
`Got unusable response from the reports endpoint (/druid/indexer/v1/task/${encodedId}/reports) going to retry`,
);
console.log('Report response:', taskReportResp.data);
}
if (execution) {
if (execution?.hasPotentiallyStuckStage()) {
const capacityInfo = await maybeGetClusterCapacity();
if (capacityInfo) {
execution = execution.changeCapacityInfo(capacityInfo);
}
}
return execution;
if (execution.hasPotentiallyStuckStage()) {
const capacityInfo = await maybeGetClusterCapacity();
if (capacityInfo) {
execution = execution.changeCapacityInfo(capacityInfo);
}
}
const statusResp = await Api.instance.get(`/druid/indexer/v1/task/${encodedId}/status`, {
cancelToken,
});
return Execution.fromTaskStatus(statusResp.data);
return execution;
}
export async function updateExecutionWithDatasourceLoadedIfNeeded(
@ -248,15 +262,10 @@ export async function updateExecutionWithDatasourceLoadedIfNeeded(
return execution.markDestinationDatasourceLoaded();
}
// Ideally we would have a more accurate query here, instead of
// COUNT(*) FILTER (WHERE is_published = 1 AND is_available = 0)
// we want to filter on something like
// COUNT(*) FILTER (WHERE is_should_be_available = 1 AND is_available = 0)
// `is_published` does not quite capture what we want but this is the best we have for now.
const segmentCheck = await queryDruidSql({
query: `SELECT
COUNT(*) AS num_segments,
COUNT(*) FILTER (WHERE is_published = 1 AND is_available = 0) AS loading_segments
COUNT(*) FILTER (WHERE is_published = 1 AND is_available = 0 AND replication_factor <> 0) AS loading_segments
FROM sys.segments
WHERE datasource = ${L(execution.destination.dataSource)} AND is_overshadowed = 0`,
});

View File

@ -24,7 +24,7 @@ import {
SqlExpression,
SqlType,
T,
} from 'druid-query-toolkit';
} from '@druid-toolkit/query';
import * as JSONBig from 'json-bigint-native';
import type {

View File

@ -18,8 +18,8 @@
import type { IconName } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import type { SqlExpression } from 'druid-query-toolkit';
import { C, F } from 'druid-query-toolkit';
import type { SqlExpression } from '@druid-toolkit/query';
import { C, F } from '@druid-toolkit/query';
import type { Filter } from 'react-table';
import { addOrUpdate, caseInsensitiveContains, filterMap } from '../utils';

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import type { QueryResult } from 'druid-query-toolkit';
import type { QueryResult } from '@druid-toolkit/query';
export interface WorkbenchRunningPromise {
promise: Promise<QueryResult>;

View File

@ -18,6 +18,7 @@
import type { IconName, Intent } from '@blueprintjs/core';
import { Menu, MenuItem } from '@blueprintjs/core';
import type { JSX } from 'react';
import React from 'react';
export interface BasicAction {

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import type { QueryResult } from 'druid-query-toolkit';
import type { QueryResult } from '@druid-toolkit/query';
import FileSaver from 'file-saver';
import * as JSONBig from 'json-bigint-native';

View File

@ -16,41 +16,43 @@
* limitations under the License.
*/
import { sane } from 'druid-query-toolkit';
import { sane } from '@druid-toolkit/query';
import { DruidError, getDruidErrorMessage } from './druid-query';
describe('DruidQuery', () => {
describe('DruidError.parsePosition', () => {
it('works for single error 1', () => {
const message = `Encountered "COUNT" at line 2, column 12. Was expecting one of: <EOF> "AS" ... "EXCEPT" ... "FETCH" ... "FROM" ... "INTERSECT" ... "LIMIT" ...`;
expect(DruidError.parsePosition(message)).toEqual({
match: 'at line 2, column 12',
expect(
DruidError.extractPosition({
sourceType: 'sql',
line: '2',
column: '12',
token: "AS \\'l\\'",
expected: '...',
}),
).toEqual({
row: 1,
column: 11,
});
});
it('works for single error 2', () => {
const message = `org.apache.calcite.runtime.CalciteContextException: At line 2, column 20: Unknown identifier '*'`;
expect(DruidError.parsePosition(message)).toEqual({
match: 'At line 2, column 20',
row: 1,
column: 19,
});
});
it('works for range', () => {
const message = `org.apache.calcite.runtime.CalciteContextException: From line 2, column 13 to line 2, column 25: No match found for function signature SUMP(<NUMERIC>)`;
expect(DruidError.parsePosition(message)).toEqual({
match: 'From line 2, column 13 to line 2, column 25',
row: 1,
column: 12,
endRow: 1,
endColumn: 25,
expect(
DruidError.extractPosition({
sourceType: 'sql',
line: '1',
column: '16',
endLine: '1',
endColumn: '17',
token: "AS \\'l\\'",
expected: '...',
}),
).toEqual({
row: 0,
column: 15,
endRow: 0,
endColumn: 16,
});
});
});
@ -62,7 +64,9 @@ describe('DruidQuery', () => {
FROM wikipedia -- test ==
WHERE channel == '#ar.wikipedia'
`;
const suggestion = DruidError.getSuggestion(`Encountered "= =" at line 3, column 15.`);
const suggestion = DruidError.getSuggestion(
`Received an unexpected token [= =] (line [3], column [15]), acceptable options:`,
);
expect(suggestion!.label).toEqual(`Replace == with =`);
expect(suggestion!.fn(sql)).toEqual(sane`
SELECT *
@ -81,7 +85,7 @@ describe('DruidQuery', () => {
ORDER BY 2 DESC
`;
const suggestion = DruidError.getSuggestion(
`Encountered "= =" at line 4, column 15. Was expecting one of: <EOF> "EXCEPT" ... "FETCH" ... "GROUP" ...`,
`Received an unexpected token [= =] (line [4], column [15]), acceptable options:`,
);
expect(suggestion!.label).toEqual(`Replace == with =`);
expect(suggestion!.fn(sql)).toEqual(sane`
@ -140,7 +144,7 @@ describe('DruidQuery', () => {
WHERE channel = "#ar.wikipedia"
`;
const suggestion = DruidError.getSuggestion(
`org.apache.calcite.runtime.CalciteContextException: From line 3, column 17 to line 3, column 31: Column '#ar.wikipedia' not found in any table`,
`Column '#ar.wikipedia' not found in any table (line [3], column [17])`,
);
expect(suggestion!.label).toEqual(`Replace "#ar.wikipedia" with '#ar.wikipedia'`);
expect(suggestion!.fn(sql)).toEqual(sane`
@ -151,41 +155,43 @@ describe('DruidQuery', () => {
});
it('works for incorrectly quoted AS alias', () => {
const suggestion = DruidError.getSuggestion(`Encountered "AS \\'c\\'" at line 1, column 16.`);
expect(suggestion!.label).toEqual(`Replace 'c' with "c"`);
expect(suggestion!.fn(`SELECT channel AS 'c' FROM wikipedia`)).toEqual(
`SELECT channel AS "c" FROM wikipedia`,
const sql = `SELECT channel AS 'c' FROM wikipedia`;
const suggestion = DruidError.getSuggestion(
`Received an unexpected token [AS \\'c\\'] (line [1], column [16]), acceptable options:`,
);
expect(suggestion!.label).toEqual(`Replace 'c' with "c"`);
expect(suggestion!.fn(sql)).toEqual(`SELECT channel AS "c" FROM wikipedia`);
});
it('removes comma (,) before FROM', () => {
const sql = `SELECT page, FROM wikipedia WHERE channel = '#ar.wikipedia'`;
const suggestion = DruidError.getSuggestion(
`Encountered ", FROM" at line 1, column 12. Was expecting one of: "ABS" ...`,
`Received an unexpected token [, FROM] (line [1], column [12]), acceptable options:`,
);
expect(suggestion!.label).toEqual(`Remove , before FROM`);
expect(suggestion!.fn(`SELECT page, FROM wikipedia WHERE channel = '#ar.wikipedia'`)).toEqual(
expect(suggestion!.label).toEqual(`Remove comma (,) before FROM`);
expect(suggestion!.fn(sql)).toEqual(
`SELECT page FROM wikipedia WHERE channel = '#ar.wikipedia'`,
);
});
it('removes comma (,) before ORDER', () => {
const sql = `SELECT page FROM wikipedia WHERE channel = '#ar.wikipedia' GROUP BY 1, ORDER BY 1`;
const suggestion = DruidError.getSuggestion(
`Encountered ", ORDER" at line 1, column 14. Was expecting one of: "ABS" ...`,
`Received an unexpected token [, ORDER] (line [1], column [70]), acceptable options:`,
);
expect(suggestion!.label).toEqual(`Remove comma (,) before ORDER`);
expect(suggestion!.fn(sql)).toEqual(
`SELECT page FROM wikipedia WHERE channel = '#ar.wikipedia' GROUP BY 1 ORDER BY 1`,
);
expect(suggestion!.label).toEqual(`Remove , before ORDER`);
expect(
suggestion!.fn(
`SELECT page FROM wikipedia WHERE channel = '#ar.wikipedia' GROUP BY 1, ORDER BY 1`,
),
).toEqual(`SELECT page FROM wikipedia WHERE channel = '#ar.wikipedia' GROUP BY 1 ORDER BY 1`);
});
it('removes trailing semicolon (;)', () => {
const sql = `SELECT page FROM wikipedia WHERE channel = '#ar.wikipedia';`;
const suggestion = DruidError.getSuggestion(
`Encountered ";" at line 1, column 59. Was expecting one of: "ABS" ...`,
`Received an unexpected token [;] (line [1], column [59]), acceptable options:`,
);
expect(suggestion!.label).toEqual(`Remove trailing ;`);
expect(suggestion!.fn(`SELECT page FROM wikipedia WHERE channel = '#ar.wikipedia';`)).toEqual(
expect(suggestion!.label).toEqual(`Remove trailing semicolon (;)`);
expect(suggestion!.fn(sql)).toEqual(
`SELECT page FROM wikipedia WHERE channel = '#ar.wikipedia'`,
);
});

View File

@ -16,9 +16,9 @@
* limitations under the License.
*/
import { C } from '@druid-toolkit/query';
import type { AxiosResponse } from 'axios';
import axios from 'axios';
import { C } from 'druid-query-toolkit';
import { Api } from '../singletons';
@ -27,9 +27,31 @@ import type { RowColumn } from './query-cursor';
const CANCELED_MESSAGE = 'Query canceled by user.';
export interface DruidErrorResponse {
// https://github.com/apache/druid/blob/master/processing/src/main/java/org/apache/druid/error/DruidException.java#L292
export type ErrorResponsePersona = 'USER' | 'ADMIN' | 'OPERATOR' | 'DEVELOPER';
// https://github.com/apache/druid/blob/master/processing/src/main/java/org/apache/druid/error/DruidException.java#L321
export type ErrorResponseCategory =
| 'DEFENSIVE'
| 'INVALID_INPUT'
| 'UNAUTHORIZED'
| 'FORBIDDEN'
| 'CAPACITY_EXCEEDED'
| 'CANCELED'
| 'RUNTIME_FAILURE'
| 'TIMEOUT'
| 'UNSUPPORTED'
| 'UNCATEGORIZED';
export interface ErrorResponse {
persona: ErrorResponsePersona;
category: ErrorResponseCategory;
errorCode?: string;
errorMessage: string; // a message for the intended audience
context?: Record<string, any>; // a map of extra context values that might be helpful
// Deprecated as per https://github.com/apache/druid/blob/master/processing/src/main/java/org/apache/druid/error/ErrorResponse.java
error?: string;
errorMessage?: string;
errorClass?: string;
host?: string;
}
@ -51,7 +73,7 @@ export function parseHtmlError(htmlStr: string): string | undefined {
.replace(/&gt;/g, '>');
}
function getDruidErrorObject(e: any): DruidErrorResponse | string {
function errorResponseFromWhatever(e: any): ErrorResponse | string {
if (e.response) {
// This is a direct axios response error
let data = e.response.data || {};
@ -64,7 +86,7 @@ function getDruidErrorObject(e: any): DruidErrorResponse | string {
}
export function getDruidErrorMessage(e: any): string {
const data = getDruidErrorObject(e);
const data = errorResponseFromWhatever(e);
switch (typeof data) {
case 'object':
return (
@ -87,30 +109,20 @@ export function getDruidErrorMessage(e: any): string {
}
export class DruidError extends Error {
static parsePosition(errorMessage: string): RowColumn | undefined {
const range = /from line (\d+), column (\d+) to line (\d+), column (\d+)/i.exec(
String(errorMessage),
);
if (range) {
return {
match: range[0],
row: Number(range[1]) - 1,
column: Number(range[2]) - 1,
endRow: Number(range[3]) - 1,
endColumn: Number(range[4]), // No -1 because we need to include the last char
};
static extractPosition(context: Record<string, any> | undefined): RowColumn | undefined {
if (context?.sourceType !== 'sql' || !context.line || !context.column) return;
const rowColumn: RowColumn = {
row: Number(context.line) - 1,
column: Number(context.column) - 1,
};
if (context.endLine && context.endColumn) {
rowColumn.endRow = Number(context.endLine) - 1;
rowColumn.endColumn = Number(context.endColumn) - 1;
}
const single = /at line (\d+), column (\d+)/i.exec(String(errorMessage));
if (single) {
return {
match: single[0],
row: Number(single[1]) - 1,
column: Number(single[2]) - 1,
};
}
return;
return rowColumn;
}
static positionToIndex(str: string, line: number, column: number): number {
@ -123,8 +135,9 @@ export class DruidError extends Error {
static getSuggestion(errorMessage: string): QuerySuggestion | undefined {
// == is used instead of =
// ex: SELECT * FROM wikipedia WHERE channel == '#en.wikipedia'
// ex: Encountered "= =" at line 3, column 15. Was expecting one of
const matchEquals = /Encountered "= =" at line (\d+), column (\d+)./.exec(errorMessage);
// er: Received an unexpected token [= =] (line [1], column [39]), acceptable options:
const matchEquals =
/Received an unexpected token \[= =] \(line \[(\d+)], column \[(\d+)]\),/.exec(errorMessage);
if (matchEquals) {
const line = Number(matchEquals[1]);
const column = Number(matchEquals[2]);
@ -140,6 +153,7 @@ export class DruidError extends Error {
// Mangled quotes from copy/paste
// ex: SELECT * FROM wikipedia WHERE channel = #en.wikipedia
// er: Lexical error at line 1, column 41. Encountered: "\u2018"
const matchLexical =
/Lexical error at line (\d+), column (\d+).\s+Encountered: "\\u201\w"/.exec(errorMessage);
if (matchLexical) {
@ -157,15 +171,15 @@ export class DruidError extends Error {
// Incorrect quoting on table column
// ex: SELECT * FROM wikipedia WHERE channel = "#en.wikipedia"
// ex: org.apache.calcite.runtime.CalciteContextException: From line 3, column 17 to line 3, column 31: Column '#ar.wikipedia' not found in any table
// er: Column '#en.wikipedia' not found in any table (line [1], column [41])
const matchQuotes =
/org.apache.calcite.runtime.CalciteContextException: From line (\d+), column (\d+) to line \d+, column \d+: Column '([^']+)' not found in any table/.exec(
/Column '([^']+)' not found in any table \(line \[(\d+)], column \[(\d+)]\)/.exec(
errorMessage,
);
if (matchQuotes) {
const line = Number(matchQuotes[1]);
const column = Number(matchQuotes[2]);
const literalString = matchQuotes[3];
const literalString = matchQuotes[1];
const line = Number(matchQuotes[2]);
const column = Number(matchQuotes[3]);
return {
label: `Replace "${literalString}" with '${literalString}'`,
fn: str => {
@ -180,7 +194,10 @@ export class DruidError extends Error {
// Single quotes on AS alias
// ex: SELECT channel AS 'c' FROM wikipedia
const matchSingleQuotesAlias = /Encountered "AS \\'([\w-]+)\\'" at/i.exec(errorMessage);
// er: Received an unexpected token [AS \'c\'] (line [1], column [16]), acceptable options:
const matchSingleQuotesAlias = /Received an unexpected token \[AS \\'([\w-]+)\\']/i.exec(
errorMessage,
);
if (matchSingleQuotesAlias) {
const alias = matchSingleQuotesAlias[1];
return {
@ -193,13 +210,16 @@ export class DruidError extends Error {
};
}
// , before FROM, GROUP, ORDER, or LIMIT
// Comma (,) before FROM, GROUP, ORDER, or LIMIT
// ex: SELECT channel, FROM wikipedia
const matchComma = /Encountered ", (FROM|GROUP|ORDER|LIMIT)" at/i.exec(errorMessage);
// er: Received an unexpected token [, FROM] (line [1], column [15]), acceptable options:
const matchComma = /Received an unexpected token \[, (FROM|GROUP|ORDER|LIMIT)]/i.exec(
errorMessage,
);
if (matchComma) {
const keyword = matchComma[1];
return {
label: `Remove , before ${keyword}`,
label: `Remove comma (,) before ${keyword}`,
fn: str => {
const newQuery = str.replace(new RegExp(`,(\\s+${keyword})`, 'gim'), '$1');
if (newQuery === str) return;
@ -208,15 +228,16 @@ export class DruidError extends Error {
};
}
// ; at the end. https://bit.ly/1n1yfkJ
// Semicolon (;) at the end. https://bit.ly/1n1yfkJ
// ex: SELECT 1;
// ex: Encountered ";" at line 6, column 16.
const matchSemicolon = /Encountered ";" at line (\d+), column (\d+)./i.exec(errorMessage);
// ex: Received an unexpected token [;] (line [1], column [9]), acceptable options:
const matchSemicolon =
/Received an unexpected token \[;] \(line \[(\d+)], column \[(\d+)]\),/i.exec(errorMessage);
if (matchSemicolon) {
const line = Number(matchSemicolon[1]);
const column = Number(matchSemicolon[2]);
return {
label: `Remove trailing ;`,
label: `Remove trailing semicolon (;)`,
fn: str => {
const index = DruidError.positionToIndex(str, line, column);
if (str[index] !== ';') return;
@ -229,49 +250,50 @@ export class DruidError extends Error {
}
public canceled?: boolean;
public error?: string;
public persona?: ErrorResponsePersona;
public category?: ErrorResponseCategory;
public context?: Record<string, any>;
public errorMessage?: string;
public errorMessageWithoutExpectation?: string;
public expectation?: string;
public position?: RowColumn;
public errorClass?: string;
public host?: string;
public suggestion?: QuerySuggestion;
constructor(e: any, removeLines?: number) {
// Depricated
public error?: string;
public errorClass?: string;
public host?: string;
constructor(e: any, skipLines = 0) {
super(axios.isCancel(e) ? CANCELED_MESSAGE : getDruidErrorMessage(e));
if (axios.isCancel(e)) {
this.canceled = true;
} else {
const data = getDruidErrorObject(e);
const data = errorResponseFromWhatever(e);
let druidErrorResponse: DruidErrorResponse;
let druidErrorResponse: ErrorResponse;
switch (typeof data) {
case 'object':
druidErrorResponse = data;
break;
case 'string':
default:
druidErrorResponse = {
errorClass: 'HTML error',
};
break;
default:
druidErrorResponse = {};
} as any; // ToDo
break;
}
Object.assign(this, druidErrorResponse);
if (this.errorMessage) {
if (removeLines) {
if (skipLines) {
this.errorMessage = this.errorMessage.replace(
/line (\d+),/g,
(_, c) => `line ${Number(c) - removeLines},`,
/line \[(\d+)],/g,
(_, c) => `line [${Number(c) - skipLines}],`,
);
}
this.position = DruidError.parsePosition(this.errorMessage);
this.position = DruidError.extractPosition(this.context);
this.suggestion = DruidError.getSuggestion(this.errorMessage);
const expectationIndex = this.errorMessage.indexOf('Was expecting one of');

View File

@ -23,6 +23,7 @@ import copy from 'copy-to-clipboard';
import hasOwnProp from 'has-own-prop';
import * as JSONBig from 'json-bigint-native';
import numeral from 'numeral';
import type { JSX } from 'react';
import React from 'react';
import { AppToaster } from '../singletons';
@ -61,7 +62,7 @@ export function wait(ms: number): Promise<void> {
});
}
export function clamp(n: number, min: number, max: number): number {
export function clamp(n: number, min = -Infinity, max = Infinity): number {
return Math.min(Math.max(n, min), max);
}
@ -89,8 +90,22 @@ export function caseInsensitiveContains(testString: string, searchString: string
return testString.toLowerCase().includes(searchString.toLowerCase());
}
export function oneOf<T>(thing: T, ...options: T[]): boolean {
return options.includes(thing);
function validateKnown<T>(allKnownValues: T[], options: T[]): void {
options.forEach(o => {
if (!allKnownValues.includes(o)) {
throw new Error(`allKnownValues (${allKnownValues.join(', ')}) must include '${o}'`);
}
});
}
export function oneOf<T>(value: T, ...options: T[]): boolean {
return options.includes(value);
}
export function oneOfKnown<T>(value: T, allKnownValues: T[], ...options: T[]): boolean | undefined {
validateKnown(allKnownValues, options);
if (options.includes(value)) return true;
return allKnownValues.includes(value) ? false : undefined;
}
export function typeIs<T extends { type?: S }, S = string>(...options: S[]): (x: T) => boolean {
@ -100,6 +115,19 @@ export function typeIs<T extends { type?: S }, S = string>(...options: S[]): (x:
};
}
export function typeIsKnown<T extends { type?: S }, S = string>(
allKnownValues: S[],
...options: S[]
): (x: T) => boolean | undefined {
validateKnown(allKnownValues, options);
return x => {
const value = x.type;
if (value == null) return;
if (options.includes(value)) return true;
return allKnownValues.includes(value) ? false : undefined;
};
}
export function without<T>(xs: readonly T[], x: T | undefined): T[] {
return xs.filter(i => i !== x);
}
@ -306,7 +334,7 @@ export function compact<T>(xs: (T | undefined | false | null | '')[]): T[] {
}
export function assemble<T>(...xs: (T | undefined | false | null | '')[]): T[] {
return xs.filter(Boolean) as T[];
return compact(xs);
}
export function moveToEnd<T>(

View File

@ -16,6 +16,8 @@
* limitations under the License.
*/
import { sum } from 'd3-array';
import { deepMove, deepSet } from './object-change';
export type RuleType =
@ -126,21 +128,17 @@ export class RuleUtil {
}
static totalReplicas(rule: Rule): number {
const tieredReplicants = rule.tieredReplicants || {};
let total = 0;
for (const k in tieredReplicants) {
total += tieredReplicants[k];
}
return total;
return sum(Object.values(rule.tieredReplicants || {}));
}
static isColdRule(rule: Rule): boolean {
static isZeroReplicaRule(rule: Rule): boolean {
return RuleUtil.canHaveTieredReplicants(rule) && RuleUtil.totalReplicas(rule) === 0;
}
static hasColdRule(rules: Rule[] | undefined, defaultRules: Rule[] | undefined): boolean {
static hasZeroReplicaRule(rules: Rule[] | undefined, defaultRules: Rule[] | undefined): boolean {
return (
(rules || []).some(RuleUtil.isColdRule) || (defaultRules || []).some(RuleUtil.isColdRule)
(rules || []).some(RuleUtil.isZeroReplicaRule) ||
(defaultRules || []).some(RuleUtil.isZeroReplicaRule)
);
}
}

View File

@ -16,6 +16,6 @@
* limitations under the License.
*/
import type { SqlQuery } from 'druid-query-toolkit';
import type { SqlQuery } from '@druid-toolkit/query';
export type QueryAction = (query: SqlQuery) => SqlQuery;

View File

@ -16,8 +16,8 @@
* limitations under the License.
*/
import type { SqlBase, SqlQuery } from 'druid-query-toolkit';
import { L } from 'druid-query-toolkit';
import type { SqlBase, SqlQuery } from '@druid-toolkit/query';
import { L } from '@druid-toolkit/query';
export const EMPTY_LITERAL = L('');
@ -37,7 +37,6 @@ export function prettyPrintSql(b: SqlBase): string {
}
export interface RowColumn {
match: string;
row: number;
column: number;
endRow?: number;
@ -55,7 +54,6 @@ export function findEmptyLiteralPosition(query: SqlQuery): RowColumn | undefined
const row = lines.length - 1;
const lastLine = lines[row];
return {
match: '',
row,
column: lastLine.length,
};

View File

@ -47,6 +47,10 @@ export interface QueryManagerOptions<Q, R, I = never, E extends Error = Error> {
export class QueryManager<Q, R, I = never, E extends Error = Error> {
static TERMINATION_MESSAGE = 'QUERY_MANAGER_TERMINATED';
static remapAxiosCancellationIntoError(e: any) {
return axios.isCancel(e) ? new Error(e.message ?? 'Browser request canceled') : e;
}
private readonly processQuery: (
query: Q,
cancelToken: CancelToken,
@ -126,7 +130,7 @@ export class QueryManager<Q, R, I = never, E extends Error = Error> {
this.currentRunCancelFn = undefined;
this.setState(
new QueryState<R, E>({
error: axios.isCancel(e) ? new Error(`canceled.`) : e, // remap cancellation into a simple error to hide away the axios implementation specifics
error: QueryManager.remapAxiosCancellationIntoError(e),
lastData: this.state.getSomeData(),
}),
);
@ -174,7 +178,7 @@ export class QueryManager<Q, R, I = never, E extends Error = Error> {
this.currentRunCancelFn = undefined;
this.setState(
new QueryState<R, E>({
error: axios.isCancel(e) ? new Error(`canceled.`) : e, // remap cancellation into a simple error to hide away the axios implementation specifics
error: QueryManager.remapAxiosCancellationIntoError(e),
lastData: this.state.getSomeData(),
}),
);
@ -219,6 +223,7 @@ export class QueryManager<Q, R, I = never, E extends Error = Error> {
public rerunLastQuery(runInBackground = false): void {
if (this.terminated) return;
if (runInBackground && this.currentRunCancelFn) return;
this.nextQuery = this.lastQuery;
if (runInBackground) {
void this.runWhenIdle();
@ -227,9 +232,9 @@ export class QueryManager<Q, R, I = never, E extends Error = Error> {
}
}
public cancelCurrent(): void {
public cancelCurrent(message?: string): void {
if (!this.currentRunCancelFn) return;
this.currentRunCancelFn();
this.currentRunCancelFn(message);
this.currentRunCancelFn = undefined;
}

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import { QueryResult, sane } from 'druid-query-toolkit';
import { QueryResult, sane } from '@druid-toolkit/query';
import { sampleDataToQuery } from './sample-query';

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import type { Column, LiteralValue, QueryResult, SqlExpression } from 'druid-query-toolkit';
import type { Column, LiteralValue, QueryResult, SqlExpression } from '@druid-toolkit/query';
import {
C,
F,
@ -27,7 +27,7 @@ import {
SqlQuery,
SqlRecord,
SqlValues,
} from 'druid-query-toolkit';
} from '@druid-toolkit/query';
import { oneOf } from './general';

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import { dedupe } from 'druid-query-toolkit';
import { dedupe } from '@druid-toolkit/query';
import * as JSONBig from 'json-bigint-native';
import type {

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import { SqlColumn, SqlExpression, SqlFunction, SqlLiteral, SqlStar } from 'druid-query-toolkit';
import { SqlColumn, SqlExpression, SqlFunction, SqlLiteral, SqlStar } from '@druid-toolkit/query';
export function timeFormatToSql(timeFormat: string): SqlExpression | undefined {
switch (timeFormat) {

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import type { QueryResult } from 'druid-query-toolkit';
import type { QueryResult } from '@druid-toolkit/query';
import { filterMap, formatNumber, oneOf } from './general';
import { deepSet } from './object-change';

View File

@ -18,7 +18,7 @@
import type { IconName } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import type { Column } from 'druid-query-toolkit';
import type { Column } from '@druid-toolkit/query';
export function columnToSummary(column: Column): string {
const lines: string[] = [column.name];

View File

@ -68,7 +68,7 @@ exports[`DatasourcesView matches snapshot 1`] = `
Array [
"Datasource name",
"Availability",
"Availability detail",
"Historical load/drop queues",
"Total data size",
"Running tasks",
"Segment rows",
@ -175,9 +175,9 @@ exports[`DatasourcesView matches snapshot 1`] = `
Object {
"Cell": [Function],
"Header": <React.Fragment>
Availability
Historical
<br />
detail
load/drop queues
</React.Fragment>,
"accessor": "num_segments_to_load",
"className": "padded",

View File

@ -18,9 +18,9 @@
import { FormGroup, InputGroup, Intent, MenuItem, Switch } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { SqlQuery, T } from '@druid-toolkit/query';
import classNames from 'classnames';
import { sum } from 'd3-array';
import { SqlQuery, T } from 'druid-query-toolkit';
import React from 'react';
import type { Filter } from 'react-table';
import ReactTable from 'react-table';
@ -57,6 +57,7 @@ import { STANDARD_TABLE_PAGE_SIZE, STANDARD_TABLE_PAGE_SIZE_OPTIONS } from '../.
import { Api, AppToaster } from '../../singletons';
import type { NumberLike } from '../../utils';
import {
assemble,
compact,
countBy,
deepGet,
@ -88,7 +89,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
'full': [
'Datasource name',
'Availability',
'Availability detail',
'Historical load/drop queues',
'Total data size',
'Running tasks',
'Segment rows',
@ -106,7 +107,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
'no-sql': [
'Datasource name',
'Availability',
'Availability detail',
'Historical load/drop queues',
'Total data size',
'Running tasks',
'Compaction',
@ -118,7 +119,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
'no-proxy': [
'Datasource name',
'Availability',
'Availability detail',
'Historical load/drop queues',
'Total data size',
'Running tasks',
'Segment rows',
@ -162,15 +163,16 @@ const PERCENT_BRACES = [formatPercent(1)];
interface DatasourceQueryResultRow {
readonly datasource: string;
readonly num_segments: NumberLike;
readonly num_segments_to_load: NumberLike;
readonly num_segments_to_drop: NumberLike;
readonly minute_aligned_segments: NumberLike;
readonly hour_aligned_segments: NumberLike;
readonly day_aligned_segments: NumberLike;
readonly month_aligned_segments: NumberLike;
readonly year_aligned_segments: NumberLike;
readonly all_granularity_segments: NumberLike;
readonly num_segments: number;
readonly num_zero_replica_segments: number;
readonly num_segments_to_load: number;
readonly num_segments_to_drop: number;
readonly minute_aligned_segments: number;
readonly hour_aligned_segments: number;
readonly day_aligned_segments: number;
readonly month_aligned_segments: number;
readonly year_aligned_segments: number;
readonly all_granularity_segments: number;
readonly total_data_size: NumberLike;
readonly replicated_size: NumberLike;
readonly min_segment_rows: NumberLike;
@ -187,6 +189,7 @@ function makeEmptyDatasourceQueryResultRow(datasource: string): DatasourceQueryR
return {
datasource,
num_segments: 0,
num_zero_replica_segments: 0,
num_segments_to_load: 0,
num_segments_to_drop: 0,
minute_aligned_segments: 0,
@ -335,10 +338,13 @@ export class DatasourcesView extends React.PureComponent<
const columns = compact(
[
visibleColumns.shown('Datasource name') && `datasource`,
(visibleColumns.shown('Availability') || visibleColumns.shown('Segment granularity')) &&
(visibleColumns.shown('Availability') || visibleColumns.shown('Segment granularity')) && [
`COUNT(*) FILTER (WHERE is_active = 1) AS num_segments`,
(visibleColumns.shown('Availability') || visibleColumns.shown('Availability detail')) && [
`COUNT(*) FILTER (WHERE is_published = 1 AND is_overshadowed = 0 AND is_available = 0) AS num_segments_to_load`,
`COUNT(*) FILTER (WHERE is_published = 1 AND is_overshadowed = 0 AND replication_factor = 0) AS num_zero_replica_segments`,
],
(visibleColumns.shown('Availability') ||
visibleColumns.shown('Historical load/drop queues')) && [
`COUNT(*) FILTER (WHERE is_published = 1 AND is_overshadowed = 0 AND is_available = 0 AND replication_factor > 0) AS num_segments_to_load`,
`COUNT(*) FILTER (WHERE is_available = 1 AND is_active = 0) AS num_segments_to_drop`,
],
visibleColumns.shown('Total data size') &&
@ -445,6 +451,7 @@ GROUP BY 1, 2`;
return {
datasource: d.name,
num_segments: numSegments,
num_zero_replica_segments: 0,
num_segments_to_load: segmentsToLoad,
num_segments_to_drop: 0,
minute_aligned_segments: -1,
@ -1031,7 +1038,7 @@ GROUP BY 1, 2`;
}
}
private renderRetentionDialog(): JSX.Element | undefined {
private renderRetentionDialog() {
const { capabilities } = this.props;
const { retentionDialogOpenOn, datasourcesAndDefaultRulesState } = this.state;
const defaultRules = datasourcesAndDefaultRulesState.data?.defaultRules;
@ -1147,7 +1154,8 @@ GROUP BY 1, 2`;
accessor: 'num_segments',
className: 'padded',
Cell: ({ value: num_segments, original }) => {
const { datasource, unused, num_segments_to_load, rules } = original as Datasource;
const { datasource, unused, num_segments_to_load, num_zero_replica_segments, rules } =
original as Datasource;
if (unused) {
return (
<span>
@ -1157,12 +1165,17 @@ GROUP BY 1, 2`;
);
}
const hasCold = RuleUtil.hasColdRule(rules, defaultRules);
const hasZeroReplicationRule = RuleUtil.hasZeroReplicaRule(rules, defaultRules);
const descriptor = hasZeroReplicationRule ? 'pre-cached' : 'available';
const segmentsEl = (
<a onClick={() => goToSegments(datasource)}>
{pluralIfNeeded(num_segments, 'segment')}
</a>
);
const percentZeroReplica = (
Math.floor((num_zero_replica_segments / num_segments) * 1000) / 10
).toFixed(1);
if (typeof num_segments_to_load !== 'number' || typeof num_segments !== 'number') {
return '-';
} else if (num_segments === 0) {
@ -1172,17 +1185,19 @@ GROUP BY 1, 2`;
Empty
</span>
);
} else if (num_segments_to_load === 0 || hasCold) {
const numAvailableSegments = num_segments - num_segments_to_load;
const percentHot = (
Math.floor((numAvailableSegments / num_segments) * 1000) / 10
).toFixed(1);
} else if (num_segments_to_load === 0) {
return (
<span>
<span style={{ color: DatasourcesView.FULLY_AVAILABLE_COLOR }}>
&#x25cf;&nbsp;
</span>
Fully available{hasCold ? `, ${percentHot}% hot` : ''} ({segmentsEl})
{assemble(
num_segments !== num_zero_replica_segments
? `Fully ${descriptor}`
: undefined,
hasZeroReplicationRule ? `${percentZeroReplica}% async only` : '',
).join(', ')}{' '}
({segmentsEl})
</span>
);
} else {
@ -1195,7 +1210,10 @@ GROUP BY 1, 2`;
<span style={{ color: DatasourcesView.PARTIALLY_AVAILABLE_COLOR }}>
{numAvailableSegments ? '\u25cf' : '\u25cb'}&nbsp;
</span>
{percentAvailable}% available ({segmentsEl})
{`${percentAvailable}% ${descriptor}${
hasZeroReplicationRule ? `, ${percentZeroReplica}% async only` : ''
}`}{' '}
({segmentsEl})
</span>
);
}
@ -1207,17 +1225,14 @@ GROUP BY 1, 2`;
},
},
{
Header: twoLines('Availability', 'detail'),
show: visibleColumns.shown('Availability detail'),
Header: twoLines('Historical', 'load/drop queues'),
show: visibleColumns.shown('Historical load/drop queues'),
accessor: 'num_segments_to_load',
filterable: false,
width: 180,
className: 'padded',
Cell: ({ original }) => {
const { num_segments_to_load, num_segments_to_drop, rules } = original as Datasource;
if (RuleUtil.hasColdRule(rules, defaultRules)) {
return pluralIfNeeded(num_segments_to_load, 'cold segment');
}
const { num_segments_to_load, num_segments_to_drop } = original as Datasource;
return formatLoadDrop(num_segments_to_load, num_segments_to_drop);
},
},
@ -1591,7 +1606,7 @@ GROUP BY 1, 2`;
);
}
render(): JSX.Element {
render() {
const { capabilities } = this.props;
const {
showUnused,

View File

@ -27,8 +27,10 @@ import { deepGet, pluralIfNeeded, queryDruidSql } from '../../../utils';
import { HomeViewCard } from '../home-view-card/home-view-card';
export interface SegmentCounts {
total: number;
active: number;
cached_on_historical: number;
unavailable: number;
realtime: number;
}
export interface SegmentsCardProps {
@ -37,13 +39,17 @@ export interface SegmentsCardProps {
export const SegmentsCard = React.memo(function SegmentsCard(props: SegmentsCardProps) {
const [segmentCountState] = useQueryManager<Capabilities, SegmentCounts>({
initQuery: props.capabilities,
processQuery: async capabilities => {
if (capabilities.hasSql()) {
const segments = await queryDruidSql({
query: `SELECT
COUNT(*) as "total",
COUNT(*) FILTER (WHERE is_active = 1 AND is_available = 0) as "unavailable"
FROM sys.segments`,
COUNT(*) AS "active",
COUNT(*) FILTER (WHERE is_available = 1) AS "cached_on_historical",
COUNT(*) FILTER (WHERE is_available = 0 AND replication_factor > 0) AS "unavailable",
COUNT(*) FILTER (WHERE is_realtime = 1) AS "realtime"
FROM sys.segments
WHERE is_active = 1`,
});
return segments.length === 1 ? segments[0] : null;
} else if (capabilities.hasCoordinatorAccess()) {
@ -60,17 +66,23 @@ FROM sys.segments`,
);
return {
total: availableSegmentNum + unavailableSegmentNum,
unavailable: unavailableSegmentNum,
active: availableSegmentNum + unavailableSegmentNum,
cached_on_historical: availableSegmentNum,
unavailable: unavailableSegmentNum, // This is no longer fully accurate because it does not replicate the [AND replication_factor > 0] condition of the SQL, this info is not in this API
realtime: 0, // Realtime segments are sadly not reported by this API
};
} else {
throw new Error(`must have SQL or coordinator access`);
}
},
initQuery: props.capabilities,
});
const segmentCount = segmentCountState.data || { total: 0, unavailable: 0 };
const segmentCount: SegmentCounts = segmentCountState.data || {
active: 0,
cached_on_historical: 0,
unavailable: 0,
realtime: 0,
};
return (
<HomeViewCard
className="segments-card"
@ -80,9 +92,15 @@ FROM sys.segments`,
loading={segmentCountState.loading}
error={segmentCountState.error}
>
<p>{pluralIfNeeded(segmentCount.total, 'segment')}</p>
<p>{pluralIfNeeded(segmentCount.active, 'active segment')}</p>
{Boolean(segmentCount.unavailable) && (
<p>{pluralIfNeeded(segmentCount.unavailable, 'unavailable segment')}</p>
<p>
{pluralIfNeeded(segmentCount.unavailable, 'segment')} waiting to be cached on historicals
</p>
)}
<p>{pluralIfNeeded(segmentCount.cached_on_historical, 'segment')} cached on historicals</p>
{Boolean(segmentCount.realtime) && (
<p>{pluralIfNeeded(segmentCount.realtime, 'realtime segment')}</p>
)}
</HomeViewCard>
);

View File

@ -41,6 +41,7 @@ import { Popover2 } from '@blueprintjs/popover2';
import classNames from 'classnames';
import * as JSONBig from 'json-bigint-native';
import memoize from 'memoize-one';
import type { JSX } from 'react';
import React from 'react';
import {
@ -688,7 +689,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
);
}
render(): JSX.Element {
render() {
const { mode } = this.props;
const { step, continueToSpec } = this.state;
const type = mode === 'all' ? '' : `${mode} `;

View File

@ -488,7 +488,7 @@ export class LookupsView extends React.PureComponent<LookupsViewProps, LookupsVi
);
}
render(): JSX.Element {
render() {
const { lookupEntriesAndTiersState, visibleColumns, lookupTableActionDialogId, actions } =
this.state;

View File

@ -64,6 +64,7 @@ exports[`SegmentsView matches snapshot 1`] = `
"Num rows",
"Avg. row size",
"Replicas",
"Replication factor",
"Is available",
"Is active",
"Is realtime",
@ -261,7 +262,9 @@ exports[`SegmentsView matches snapshot 1`] = `
"Header": <React.Fragment>
Avg. row size
<br />
(bytes)
<i>
(bytes)
</i>
</React.Fragment>,
"accessor": "avg_row_size",
"className": "padded",
@ -270,13 +273,34 @@ exports[`SegmentsView matches snapshot 1`] = `
"width": 100,
},
Object {
"Header": "Replicas",
"Header": <React.Fragment>
Replicas
<br />
<i>
(actual)
</i>
</React.Fragment>,
"accessor": "num_replicas",
"className": "padded",
"defaultSortDesc": true,
"filterable": false,
"show": true,
"width": 60,
"width": 80,
},
Object {
"Header": <React.Fragment>
Replication factor
<br />
<i>
(desired)
</i>
</React.Fragment>,
"accessor": "replication_factor",
"className": "padded",
"defaultSortDesc": true,
"filterable": false,
"show": true,
"width": 80,
},
Object {
"Filter": [Function],

View File

@ -16,10 +16,10 @@
* limitations under the License.
*/
import { Button, ButtonGroup, Intent, Label, MenuItem, Switch } from '@blueprintjs/core';
import { Button, ButtonGroup, Code, Intent, Label, MenuItem, Switch } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { C, L, SqlComparison, SqlExpression } from '@druid-toolkit/query';
import classNames from 'classnames';
import { C, L, SqlComparison, SqlExpression } from 'druid-query-toolkit';
import * as JSONBig from 'json-bigint-native';
import React from 'react';
import type { Filter } from 'react-table';
@ -88,6 +88,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
'Num rows',
'Avg. row size',
'Replicas',
'Replication factor',
'Is available',
'Is active',
'Is realtime',
@ -118,6 +119,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
'Num rows',
'Avg. row size',
'Replicas',
'Replication factor',
'Is available',
'Is active',
'Is realtime',
@ -162,6 +164,7 @@ interface SegmentQueryResultRow {
num_rows: NumberLike;
avg_row_size: NumberLike;
num_replicas: number;
replication_factor: number;
is_available: number;
is_active: number;
is_realtime: number;
@ -214,6 +217,7 @@ END AS "time_span"`,
visibleColumns.shown('Avg. row size') &&
`CASE WHEN "num_rows" <> 0 THEN ("size" / "num_rows") ELSE 0 END AS "avg_row_size"`,
visibleColumns.shown('Replicas') && `"num_replicas"`,
visibleColumns.shown('Replication factor') && `"replication_factor"`,
visibleColumns.shown('Is available') && `"is_available"`,
visibleColumns.shown('Is active') && `"is_active"`,
visibleColumns.shown('Is realtime') && `"is_realtime"`,
@ -308,6 +312,17 @@ END AS "time_span"`,
whereClause = SqlExpression.and(...whereParts).toString();
}
let effectiveSorted = sorted;
if (!effectiveSorted.find(sort => sort.id === 'version') && effectiveSorted.length) {
// Ensure there is a sort on version as a tiebreaker
effectiveSorted = effectiveSorted.concat([
{
id: 'version',
desc: effectiveSorted[0].desc, // Take the first direction if it exists
},
]);
}
if (groupByInterval) {
const innerQuery = compact([
`SELECT "start" || '/' || "end" AS "interval"`,
@ -332,11 +347,11 @@ END AS "time_span"`,
whereClause ? ` AND ${whereClause}` : '',
]);
if (sorted.length) {
if (effectiveSorted.length) {
queryParts.push(
'ORDER BY ' +
sorted
.map((sort: any) => `${C(sort.id)} ${sort.desc ? 'DESC' : 'ASC'}`)
effectiveSorted
.map(sort => `${C(sort.id)} ${sort.desc ? 'DESC' : 'ASC'}`)
.join(', '),
);
}
@ -349,11 +364,11 @@ END AS "time_span"`,
queryParts.push(`WHERE ${whereClause}`);
}
if (sorted.length) {
if (effectiveSorted.length) {
queryParts.push(
'ORDER BY ' +
sorted
.map((sort: any) => `${C(sort.id)} ${sort.desc ? 'DESC' : 'ASC'}`)
effectiveSorted
.map(sort => `${C(sort.id)} ${sort.desc ? 'DESC' : 'ASC'}`)
.join(', '),
);
}
@ -413,6 +428,7 @@ END AS "time_span"`,
num_rows: -1,
avg_row_size: -1,
num_replicas: -1,
replication_factor: -1,
is_available: -1,
is_active: -1,
is_realtime: -1,
@ -529,7 +545,11 @@ END AS "time_span"`,
data={segments}
pages={10000000} // Dummy, we are hiding the page selector
loading={segmentsState.loading}
noDataText={segmentsState.isEmpty() ? 'No segments' : segmentsState.getErrorMessage() || ''}
noDataText={
segmentsState.isEmpty()
? `No segments${filters.length ? ' matching filter' : ''}`
: segmentsState.getErrorMessage() || ''
}
manual
filterable
filtered={filters}
@ -781,7 +801,7 @@ END AS "time_span"`,
),
},
{
Header: twoLines('Avg. row size', '(bytes)'),
Header: twoLines('Avg. row size', <i>(bytes)</i>),
show: capabilities.hasSql() && visibleColumns.shown('Avg. row size'),
accessor: 'avg_row_size',
filterable: false,
@ -799,10 +819,19 @@ END AS "time_span"`,
},
},
{
Header: 'Replicas',
Header: twoLines('Replicas', <i>(actual)</i>),
show: hasSql && visibleColumns.shown('Replicas'),
accessor: 'num_replicas',
width: 60,
width: 80,
filterable: false,
defaultSortDesc: true,
className: 'padded',
},
{
Header: twoLines('Replication factor', <i>(desired)</i>),
show: hasSql && visibleColumns.shown('Replication factor'),
accessor: 'replication_factor',
width: 80,
filterable: false,
defaultSortDesc: true,
className: 'padded',
@ -905,7 +934,9 @@ END AS "time_span"`,
this.segmentsQueryManager.rerunLastQuery();
}}
>
<p>{`Are you sure you want to drop segment '${terminateSegmentId}'?`}</p>
<p>
Are you sure you want to drop segment <Code>{terminateSegmentId}</Code>?
</p>
<p>This action is not reversible.</p>
</AsyncActionDialog>
);
@ -932,7 +963,7 @@ END AS "time_span"`,
);
}
render(): JSX.Element {
render() {
const {
segmentTableActionDialogId,
datasourceTableActionDialogId,

View File

@ -759,7 +759,7 @@ ORDER BY
);
}
render(): JSX.Element {
render() {
const { capabilities } = this.props;
const { groupServicesBy, visibleColumns } = this.state;

View File

@ -19,8 +19,9 @@
import { Button, FormGroup, Menu, MenuItem } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { Popover2 } from '@blueprintjs/popover2';
import type { QueryResult, SqlExpression } from 'druid-query-toolkit';
import { F } from 'druid-query-toolkit';
import type { QueryResult, SqlExpression } from '@druid-toolkit/query';
import { F } from '@druid-toolkit/query';
import type { JSX } from 'react';
import React from 'react';
import { possibleDruidFormatForValues, TIME_COLUMN } from '../../../druid-models';

View File

@ -19,8 +19,9 @@
import { Button, FormGroup, InputGroup, Intent, Menu, MenuItem, Position } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { Popover2 } from '@blueprintjs/popover2';
import type { QueryResult } from 'druid-query-toolkit';
import { SqlExpression, SqlFunction } from 'druid-query-toolkit';
import type { QueryResult } from '@druid-toolkit/query';
import { SqlExpression, SqlFunction } from '@druid-toolkit/query';
import type { JSX } from 'react';
import React, { useState } from 'react';
import { AppToaster } from '../../../singletons';

View File

@ -18,7 +18,7 @@
import { Button, Classes, Dialog, FormGroup, InputGroup, Intent } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { SqlExpression } from 'druid-query-toolkit';
import { SqlExpression } from '@druid-toolkit/query';
import React, { useState } from 'react';
import { FlexibleQueryInput } from '../../workbench-view/flexible-query-input/flexible-query-input';

View File

@ -18,8 +18,8 @@
import { Button, Classes, Dialog, Intent } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { T } from '@druid-toolkit/query';
import classNames from 'classnames';
import { T } from 'druid-query-toolkit';
import React, { useState } from 'react';
import type { Execution, QueryWithContext } from '../../../druid-models';

View File

@ -19,7 +19,7 @@
import { Icon } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import { Popover2 } from '@blueprintjs/popover2';
import type { QueryResult, SqlExpression } from 'druid-query-toolkit';
import type { QueryResult, SqlExpression } from '@druid-toolkit/query';
import React, { useMemo } from 'react';
import { LearnMore, PopoverText } from '../../../../components';

View File

@ -17,9 +17,9 @@
*/
import { Icon } from '@blueprintjs/core';
import type { Column, QueryResult } from '@druid-toolkit/query';
import { SqlColumn } from '@druid-toolkit/query';
import classNames from 'classnames';
import type { Column, QueryResult } from 'druid-query-toolkit';
import { SqlColumn } from 'druid-query-toolkit';
import React from 'react';
import { columnToIcon } from '../../../../../utils';

Some files were not shown because too many files have changed in this diff Show More