mirror of https://github.com/apache/druid.git
Web console: support new ingest spec format (#8828)
* converter v1
* working v1
* update tests
* update tests
* upgrades
* adjust to new API
* remove hack
* fwd
* step
* neo cache
* fix time selection
* smart reset
* parquet autodetection
* add binaryAsString option
* partitionsSpec
* add ORC support
* ingestSegment -> druid
* remove index tasks
* better min
* load data works
* remove downgrade
* filter on group_id
* fix group_id in test
* update auto form for new props
* add dropBeforeByPeriod rule
* simplify
* prettify json
This commit is contained in:
parent 8dd9a8cb15
commit 1cff73f3e0
@@ -23,7 +23,7 @@ import { compact } from '../../utils';
 
 export interface ArrayInputProps {
   className?: string;
-  values: string[];
+  values: string[] | undefined;
   onChange: (newValues: string[] | undefined) => void;
   placeholder?: string;
   large?: boolean;
@@ -40,8 +40,11 @@ export const ArrayInput = React.memo(function ArrayInput(props: ArrayInputProps)
     const stringValue = e.target.value;
     const newValues: string[] = stringValue.split(/[,\s]+/).map((v: string) => v.trim());
     const newValuesFiltered = compact(newValues);
-    if (newValues.length === newValuesFiltered.length) {
-      onChange(stringValue === '' ? undefined : newValuesFiltered);
+    if (stringValue === '') {
+      onChange(undefined);
+      setStringValue(undefined);
+    } else if (newValues.length === newValuesFiltered.length) {
+      onChange(newValuesFiltered);
       setStringValue(undefined);
     } else {
       setStringValue(stringValue);
@@ -51,7 +54,7 @@ export const ArrayInput = React.memo(function ArrayInput(props: ArrayInputProps)
   return (
     <TextArea
       className={className}
-      value={stringValue || props.values.join(', ')}
+      value={stringValue || (props.values || []).join(', ')}
      onChange={handleChange}
      placeholder={placeholder}
      large={large}
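The rewritten handleChange gives ArrayInput a three-way contract: a cleared input now reports `undefined` (field unset, not an empty array), clean comma- or space-separated input reports the parsed array, and partially typed input (for example a trailing comma) is held as local raw text without firing onChange. A minimal sketch of a consumer under that contract; the onValuesChange handler here is hypothetical, not from the patch:

// Hypothetical consumer of the new ArrayInput contract.
const onValuesChange = (newValues: string[] | undefined) => {
  if (newValues === undefined) {
    console.log('field unset (input was cleared)'); // '' was typed
  } else {
    console.log(`values: [${newValues.join(', ')}]`); // e.g. 'a, b' -> [a, b]
  }
  // Input like 'a,' never reaches here: the component keeps it as raw text
  // until it parses cleanly.
};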
@@ -25,11 +25,13 @@ import { FormGroupWithInfo } from '../form-group-with-info/form-group-with-info'
 import { IntervalInput } from '../interval-input/interval-input';
 import { JsonInput } from '../json-input/json-input';
 import { PopoverText } from '../popover-text/popover-text';
-import { SuggestibleInput, SuggestionGroup } from '../suggestible-input/suggestible-input';
+import { SuggestibleInput, Suggestion } from '../suggestible-input/suggestible-input';
 
 import './auto-form.scss';
 
-export interface Field<T> {
+export type Functor<M, R> = R | ((model: M) => R);
+
+export interface Field<M> {
   name: string;
   label?: string;
   info?: React.ReactNode;
@@ -43,12 +45,14 @@ export interface Field<T> {
     | 'json'
     | 'interval';
   defaultValue?: any;
-  suggestions?: (string | SuggestionGroup)[];
+  suggestions?: Functor<M, Suggestion[]>;
   placeholder?: string;
   min?: number;
-  disabled?: boolean | ((model: T) => boolean);
-  defined?: boolean | ((model: T) => boolean);
-  required?: boolean | ((model: T) => boolean);
+  zeroMeansUndefined?: boolean;
+  disabled?: Functor<M, boolean>;
+  defined?: Functor<M, boolean>;
+  required?: Functor<M, boolean>;
+  adjustment?: (model: M) => M;
 }
 
 export interface AutoFormProps<T> {
@@ -73,21 +77,16 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
     return newLabel;
   }
 
-  static evaluateFunctor<T>(
-    functor: undefined | boolean | ((model: T) => boolean),
-    model: T | undefined,
-    defaultValue = false,
-  ): boolean {
+  static evaluateFunctor<M, R>(
+    functor: undefined | Functor<M, R>,
+    model: M | undefined,
+    defaultValue: R,
+  ): R {
     if (!model || functor == null) return defaultValue;
-    switch (typeof functor) {
-      case 'boolean':
-        return functor;
-
-      case 'function':
-        return functor(model);
-
-      default:
-        throw new TypeError(`invalid functor`);
+    if (typeof functor === 'function') {
+      return (functor as any)(model);
+    } else {
+      return functor;
     }
   }
 
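The new Functor<M, R> alias generalizes evaluateFunctor from booleans to any result type, which is what lets `suggestions` be computed from the current model while `disabled`, `defined`, and `required` keep working as plain values. A self-contained sketch of the pattern; the Spec model and suggestions field below are hypothetical, not from the patch:

type Functor<M, R> = R | ((model: M) => R);

function evaluateFunctor<M, R>(
  functor: undefined | Functor<M, R>,
  model: M | undefined,
  defaultValue: R,
): R {
  if (!model || functor == null) return defaultValue;
  // R may itself be a function type, so narrow with a cast, as the diff does.
  return typeof functor === 'function' ? (functor as any)(model) : functor;
}

interface Spec { format: 'json' | 'csv'; } // hypothetical model
const suggestions: Functor<Spec, string[]> = spec =>
  spec.format === 'csv' ? ['listDelimiter'] : ['flattenSpec'];

evaluateFunctor(suggestions, { format: 'csv' }, []); // ['listDelimiter']
evaluateFunctor(undefined, { format: 'csv' }, []);   // [] (the default)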
@@ -109,27 +108,42 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
   };
 
   private modelChange = (newModel: T) => {
-    const { fields, onChange } = this.props;
+    const { fields, onChange, model } = this.props;
 
+    // Delete things that are not defined now (but were defined prior to the change)
     for (const someField of fields) {
-      if (!AutoForm.evaluateFunctor(someField.defined, newModel, true)) {
+      if (
+        !AutoForm.evaluateFunctor(someField.defined, newModel, true) &&
+        AutoForm.evaluateFunctor(someField.defined, model, true)
+      ) {
         newModel = deepDelete(newModel, someField.name);
       }
     }
 
+    // Perform any adjustments if needed
+    for (const someField of fields) {
+      if (someField.adjustment) {
+        newModel = someField.adjustment(newModel);
+      }
+    }
+
     onChange(newModel);
   };
 
   private renderNumberInput(field: Field<T>): JSX.Element {
     const { model, large, onFinalize } = this.props;
 
-    const modelValue = deepGet(model as any, field.name) || field.defaultValue;
+    let modelValue = deepGet(model as any, field.name);
+    if (typeof modelValue !== 'number') modelValue = field.defaultValue;
     return (
       <NumericInput
         value={modelValue}
         onValueChange={(valueAsNumber: number, valueAsString: string) => {
           if (valueAsString === '' || isNaN(valueAsNumber)) return;
-          this.fieldChange(field, valueAsNumber);
+          this.fieldChange(
+            field,
+            valueAsNumber === 0 && field.zeroMeansUndefined ? undefined : valueAsNumber,
+          );
         }}
         onBlur={e => {
           if (e.target.value === '') {
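modelChange is now a two-pass update: the first pass deletes a field's value only when the field was defined under the previous model and is no longer defined under the new one (so values behind still-hidden fields are not clobbered), and the second pass gives each field's optional `adjustment` a chance to rewrite the model. A reduced sketch of the control flow; it mutates a plain object for brevity where the real code uses deepDelete immutably:

type Model = Record<string, any>;

interface FieldSketch {
  name: string;
  defined?: (model: Model) => boolean;
  adjustment?: (model: Model) => Model;
}

function modelChange(fields: FieldSketch[], prev: Model, next: Model): Model {
  // Pass 1: drop values whose fields just became un-defined.
  for (const f of fields) {
    const definedNow = f.defined ? f.defined(next) : true;
    const definedBefore = f.defined ? f.defined(prev) : true;
    if (!definedNow && definedBefore) delete next[f.name];
  }
  // Pass 2: let fields adjust the model (e.g. keep dependent properties consistent).
  for (const f of fields) {
    if (f.adjustment) next = f.adjustment(next);
  }
  return next;
}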
@@ -140,10 +154,10 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
         min={field.min || 0}
         fill
         large={large}
-        disabled={AutoForm.evaluateFunctor(field.disabled, model)}
+        disabled={AutoForm.evaluateFunctor(field.disabled, model, false)}
         placeholder={field.placeholder}
         intent={
-          AutoForm.evaluateFunctor(field.required, model) && modelValue == null
+          AutoForm.evaluateFunctor(field.required, model, false) && modelValue == null
             ? AutoForm.REQUIRED_INTENT
             : undefined
         }
@@ -169,7 +183,7 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
         majorStepSize={1000000}
         fill
         large={large}
-        disabled={AutoForm.evaluateFunctor(field.disabled, model)}
+        disabled={AutoForm.evaluateFunctor(field.disabled, model, false)}
       />
     );
   }
@@ -190,11 +204,11 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
         }}
         onFinalize={onFinalize}
         placeholder={field.placeholder}
-        suggestions={field.suggestions}
+        suggestions={AutoForm.evaluateFunctor(field.suggestions, model, undefined)}
         large={large}
-        disabled={AutoForm.evaluateFunctor(field.disabled, model)}
+        disabled={AutoForm.evaluateFunctor(field.disabled, model, false)}
         intent={
-          AutoForm.evaluateFunctor(field.required, model) && modelValue == null
+          AutoForm.evaluateFunctor(field.required, model, false) && modelValue == null
             ? AutoForm.REQUIRED_INTENT
             : undefined
         }
@@ -206,9 +220,9 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
     const { model, large, onFinalize } = this.props;
     const modelValue = deepGet(model as any, field.name);
     const shownValue = modelValue == null ? field.defaultValue : modelValue;
-    const disabled = AutoForm.evaluateFunctor(field.disabled, model);
+    const disabled = AutoForm.evaluateFunctor(field.disabled, model, false);
     const intent =
-      AutoForm.evaluateFunctor(field.required, model) && modelValue == null
+      AutoForm.evaluateFunctor(field.required, model, false) && modelValue == null
         ? AutoForm.REQUIRED_INTENT
         : undefined;
 
@@ -263,9 +277,9 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
         }}
         placeholder={field.placeholder}
         large={large}
-        disabled={AutoForm.evaluateFunctor(field.disabled, model)}
+        disabled={AutoForm.evaluateFunctor(field.disabled, model, false)}
         intent={
-          AutoForm.evaluateFunctor(field.required, model) && modelValue == null
+          AutoForm.evaluateFunctor(field.required, model, false) && modelValue == null
             ? AutoForm.REQUIRED_INTENT
             : undefined
         }
@@ -90,59 +90,54 @@ exports[`rule editor matches snapshot 1`] = `
   >
     <select>
       <option
-        value="load"
+        value="loadForever"
       >
-        Load
+        loadForever
       </option>
       <option
-        value="drop"
+        value="loadByInterval"
       >
-        Drop
+        loadByInterval
       </option>
       <option
-        value="broadcast"
+        value="loadByPeriod"
       >
-        Broadcast
-      </option>
-    </select>
-    <span
-      class="bp3-icon bp3-icon-double-caret-vertical"
-      icon="double-caret-vertical"
-    >
-      <svg
-        data-icon="double-caret-vertical"
-        height="16"
-        viewBox="0 0 16 16"
-        width="16"
-      >
-        <desc>
-          double-caret-vertical
-        </desc>
-        <path
-          d="M5 7h6a1.003 1.003 0 00.71-1.71l-3-3C8.53 2.11 8.28 2 8 2s-.53.11-.71.29l-3 3A1.003 1.003 0 005 7zm6 2H5a1.003 1.003 0 00-.71 1.71l3 3c.18.18.43.29.71.29s.53-.11.71-.29l3-3A1.003 1.003 0 0011 9z"
-          fill-rule="evenodd"
-        />
-      </svg>
-    </span>
-  </div>
-  <div
-    class="bp3-html-select"
-  >
-    <select>
-      <option
-        value="Forever"
-      >
-        forever
+        loadByPeriod
       </option>
       <option
-        value="ByPeriod"
+        value="dropForever"
       >
-        by period
+        dropForever
       </option>
       <option
-        value="ByInterval"
+        value="dropByInterval"
       >
-        by interval
+        dropByInterval
+      </option>
+      <option
+        value="dropByPeriod"
+      >
+        dropByPeriod
+      </option>
+      <option
+        value="dropBeforeByPeriod"
+      >
+        dropBeforeByPeriod
+      </option>
+      <option
+        value="broadcastForever"
+      >
+        broadcastForever
+      </option>
+      <option
+        value="broadcastByInterval"
+      >
+        broadcastByInterval
+      </option>
+      <option
+        value="broadcastByPeriod"
+      >
+        broadcastByPeriod
       </option>
     </select>
     <span
@@ -31,10 +31,7 @@
     }
   }
 
-  .by-period {
-    display: flex;
-    .bp3-input-group {
-      padding-right: 15px;
-    }
+  .include-future {
+    margin-left: 15px;
   }
 }
@@ -49,9 +49,6 @@ export const RuleEditor = React.memo(function RuleEditor(props: RuleEditorProps)
   const [isOpen, setIsOpen] = useState(true);
   if (!rule) return null;
 
-  const ruleLoadType = RuleUtil.getLoadType(rule);
-  const ruleTimeType = RuleUtil.getTimeType(rule);
-
   function removeTier(key: string) {
     const newTierReplicants = Object.assign({}, rule.tieredReplicants);
     delete newTierReplicants[key];
@@ -72,14 +69,12 @@ export const RuleEditor = React.memo(function RuleEditor(props: RuleEditorProps)
       }
     }
 
-    onChange(RuleUtil.changeTierReplication(rule, newTierName, 1));
+    onChange(RuleUtil.addTieredReplicant(rule, newTierName, 1));
   }
 
   function renderTiers() {
-    if (RuleUtil.getLoadType(rule) !== 'load') return null;
-
     const tieredReplicants = rule.tieredReplicants;
-    if (!tieredReplicants) return null;
+    if (!tieredReplicants) return;
 
     const ruleTiers = Object.keys(tieredReplicants).sort();
     return ruleTiers.map(tier => {
@@ -92,7 +87,7 @@ export const RuleEditor = React.memo(function RuleEditor(props: RuleEditorProps)
             value={tieredReplicants[tier]}
             onValueChange={(v: number) => {
               if (isNaN(v)) return;
-              onChange(RuleUtil.changeTierReplication(rule, tier, v));
+              onChange(RuleUtil.addTieredReplicant(rule, tier, v));
             }}
             min={1}
             max={256}
@@ -103,7 +98,9 @@ export const RuleEditor = React.memo(function RuleEditor(props: RuleEditorProps)
           <HTMLSelect
             fill
             value={tier}
-            onChange={(e: any) => onChange(RuleUtil.changeTier(rule, tier, e.target.value))}
+            onChange={(e: any) =>
+              onChange(RuleUtil.renameTieredReplicants(rule, tier, e.target.value))
+            }
           >
             {tiers
               .filter(t => t === tier || !tieredReplicants[t])
@@ -127,7 +124,7 @@ export const RuleEditor = React.memo(function RuleEditor(props: RuleEditorProps)
 
   function renderTierAdder() {
     const { rule, tiers } = props;
-    if (Object.keys(rule.tieredReplicants || {}).length >= Object.keys(tiers).length) return null;
+    if (Object.keys(rule.tieredReplicants || {}).length >= Object.keys(tiers).length) return;
 
     return (
       <FormGroup className="right">
@@ -138,18 +135,6 @@ export const RuleEditor = React.memo(function RuleEditor(props: RuleEditorProps)
     );
   }
 
-  function renderColocatedDataSources() {
-    const { rule, onChange } = props;
-    return (
-      <FormGroup label="Colocated datasources:">
-        <TagInput
-          values={rule.colocatedDataSources || []}
-          onChange={(v: any) => onChange(RuleUtil.changeColocatedDataSources(rule, v))}
-          fill
-        />
-      </FormGroup>
-    );
-  }
   return (
     <div className="rule-editor">
       <div className="title">
@@ -172,52 +157,39 @@ export const RuleEditor = React.memo(function RuleEditor(props: RuleEditorProps)
           <FormGroup>
             <ControlGroup>
               <HTMLSelect
-                value={ruleLoadType}
+                value={rule.type}
                 onChange={(e: any) =>
-                  onChange(RuleUtil.changeLoadType(rule, e.target.value as any))
+                  onChange(RuleUtil.changeRuleType(rule, e.target.value as any))
                 }
               >
-                <option value="load">Load</option>
-                <option value="drop">Drop</option>
-                <option value="broadcast">Broadcast</option>
+                {RuleUtil.TYPES.map(type => {
+                  return (
+                    <option key={type} value={type}>
+                      {type}
+                    </option>
+                  );
+                })}
               </HTMLSelect>
-              <HTMLSelect
-                value={ruleTimeType}
-                onChange={(e: any) =>
-                  onChange(RuleUtil.changeTimeType(rule, e.target.value as any))
-                }
-              >
-                <option value="Forever">forever</option>
-                <option value="ByPeriod">by period</option>
-                <option value="ByInterval">by interval</option>
-              </HTMLSelect>
-              {ruleTimeType === 'ByPeriod' && (
-                <div className={`by-period`}>
-                  <InputGroup
-                    value={rule.period || ''}
-                    onChange={(e: any) =>
-                      onChange(RuleUtil.changePeriod(rule, e.target.value as any))
-                    }
-                    placeholder="P1D"
-                  />
-                  <Switch
-                    large
-                    checked={rule.includeFuture !== undefined ? rule.includeFuture : true}
-                    label={`Include future`}
-                    onChange={() => {
-                      onChange(
-                        RuleUtil.changeIncludeFuture(
-                          rule,
-                          rule.includeFuture !== undefined
-                            ? (!rule.includeFuture as boolean)
-                            : false,
-                        ),
-                      );
-                    }}
-                  />
-                </div>
+              {RuleUtil.hasPeriod(rule) && (
+                <InputGroup
+                  value={rule.period || ''}
+                  onChange={(e: any) =>
+                    onChange(RuleUtil.changePeriod(rule, e.target.value as any))
+                  }
+                  placeholder="P1D"
+                />
               )}
-              {ruleTimeType === 'ByInterval' && (
+              {RuleUtil.hasIncludeFuture(rule) && (
+                <Switch
+                  className="include-future"
+                  checked={rule.includeFuture || false}
+                  label="Include future"
+                  onChange={() => {
+                    onChange(RuleUtil.changeIncludeFuture(rule, !rule.includeFuture));
+                  }}
+                />
+              )}
+              {RuleUtil.hasInterval(rule) && (
                 <InputGroup
                   value={rule.interval || ''}
                   onChange={(e: any) =>
@@ -228,13 +200,21 @@ export const RuleEditor = React.memo(function RuleEditor(props: RuleEditorProps)
               )}
             </ControlGroup>
           </FormGroup>
-          {ruleLoadType === 'load' && (
+          {RuleUtil.hasTieredReplicants(rule) && (
             <FormGroup>
               {renderTiers()}
               {renderTierAdder()}
             </FormGroup>
           )}
-          {ruleLoadType === 'broadcast' && <FormGroup>{renderColocatedDataSources()}</FormGroup>}
+          {RuleUtil.hasColocatedDataSources(rule) && (
+            <FormGroup label="Colocated datasources">
+              <TagInput
+                values={rule.colocatedDataSources || []}
+                onChange={(v: any) => onChange(RuleUtil.changeColocatedDataSources(rule, v))}
+                fill
+              />
+            </FormGroup>
+          )}
         </Card>
       </Collapse>
     </div>
@@ -35,10 +35,12 @@ export interface SuggestionGroup {
   suggestions: string[];
 }
 
+export type Suggestion = string | SuggestionGroup;
+
 export interface SuggestibleInputProps extends HTMLInputProps {
   onValueChange: (newValue: string) => void;
   onFinalize?: () => void;
-  suggestions?: (string | SuggestionGroup)[];
+  suggestions?: Suggestion[];
   large?: boolean;
   intent?: Intent;
 }
@@ -70,6 +70,7 @@ export class ConsoleApplication extends React.PureComponent<
 
   private supervisorId?: string;
   private taskId?: string;
+  private taskGroupId?: string;
   private openDialog?: string;
   private datasource?: string;
   private onlyUnavailable?: boolean;
@@ -109,6 +110,7 @@ export class ConsoleApplication extends React.PureComponent<
   private resetInitialsWithDelay() {
     setTimeout(() => {
       this.taskId = undefined;
+      this.taskGroupId = undefined;
       this.supervisorId = undefined;
       this.openDialog = undefined;
       this.datasource = undefined;
@@ -138,8 +140,8 @@ export class ConsoleApplication extends React.PureComponent<
     this.resetInitialsWithDelay();
   };
 
-  private goToIngestionWithTaskId = (taskId?: string, openDialog?: string) => {
-    this.taskId = taskId;
+  private goToIngestionWithTaskGroupId = (taskGroupId?: string, openDialog?: string) => {
+    this.taskGroupId = taskGroupId;
     if (openDialog) this.openDialog = openDialog;
     window.location.hash = 'ingestion';
     this.resetInitialsWithDelay();
@@ -193,7 +195,7 @@ export class ConsoleApplication extends React.PureComponent<
         initSupervisorId={this.supervisorId}
         initTaskId={this.taskId}
         exampleManifestsUrl={exampleManifestsUrl}
-        goToTask={this.goToIngestionWithTaskId}
+        goToIngestion={this.goToIngestionWithTaskGroupId}
       />,
       'narrow-pad',
     );
@@ -235,7 +237,7 @@ export class ConsoleApplication extends React.PureComponent<
     return this.wrapInViewContainer(
       'ingestion',
       <IngestionView
-        taskId={this.taskId}
+        taskGroupId={this.taskGroupId}
        datasourceId={this.datasource}
        openDialog={this.openDialog}
        goToDatasource={this.goToDatasources}
@@ -254,7 +256,7 @@ export class ConsoleApplication extends React.PureComponent<
       <ServicesView
         middleManager={this.middleManager}
         goToQuery={this.goToQuery}
-        goToTask={this.goToIngestionWithTaskId}
+        goToTask={this.goToIngestionWithTaskGroupId}
         capabilities={capabilities}
       />,
     );
@@ -230,23 +230,21 @@ export const DOCTOR_CHECKS: DoctorCheck[] = [
     try {
       testSampledData = await postToSampler(
         {
-          type: 'index',
+          type: 'index_parallel',
           spec: {
-            type: 'index',
-            ioConfig: { type: 'index', firehose: { type: 'inline', data: '{"test":"Data"}' } },
+            type: 'index_parallel',
+            ioConfig: {
+              type: 'index_parallel',
+              inputSource: { type: 'inline', data: '{"test":"Data"}' },
+              inputFormat: { type: 'json' },
+            },
             dataSchema: {
               dataSource: 'sample',
-              parser: {
-                type: 'string',
-                parseSpec: {
-                  format: 'json',
-                  timestampSpec: {
-                    column: '!!!_no_such_column_!!!',
-                    missingValue: '2010-01-01T00:00:00Z',
-                  },
-                  dimensionsSpec: { dimensions: ['test'] },
-                },
+              timestampSpec: {
+                column: '!!!_no_such_column_!!!',
+                missingValue: '2010-01-01T00:00:00Z',
               },
+              dimensionsSpec: { dimensions: ['test'] },
               transformSpec: {},
               metricsSpec: [],
               granularitySpec: { queryGranularity: 'NONE' },
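The rewritten doctor check doubles as a compact before/after of the spec migration this PR implements: the legacy firehose plus dataSchema.parser.parseSpec layout becomes inputSource plus inputFormat on the ioConfig, with timestampSpec and dimensionsSpec promoted directly onto dataSchema. Trimmed to the moving parts (both shapes are taken from this diff; the values are the sampler test data):

// Legacy 'index' layout: parsing config nested under dataSchema.parser.parseSpec.
const legacySpec = {
  type: 'index',
  ioConfig: { type: 'index', firehose: { type: 'inline', data: '{"test":"Data"}' } },
  dataSchema: {
    parser: {
      type: 'string',
      parseSpec: { format: 'json', timestampSpec: {}, dimensionsSpec: {} },
    },
  },
};

// New 'index_parallel' layout: inputSource + inputFormat, flat dataSchema.
const newSpec = {
  type: 'index_parallel',
  ioConfig: {
    type: 'index_parallel',
    inputSource: { type: 'inline', data: '{"test":"Data"}' },
    inputFormat: { type: 'json' },
  },
  dataSchema: { timestampSpec: {}, dimensionsSpec: {} },
};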
@@ -0,0 +1,76 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`ingestion-spec upgrades 1`] = `
+Object {
+  "dataSchema": Object {
+    "dataSource": "wikipedia",
+    "dimensionsSpec": Object {
+      "dimensions": Array [
+        "channel",
+        "cityName",
+        "comment",
+      ],
+    },
+    "granularitySpec": Object {
+      "queryGranularity": "HOUR",
+      "rollup": true,
+      "segmentGranularity": "DAY",
+      "type": "uniform",
+    },
+    "metricsSpec": Array [
+      Object {
+        "name": "count",
+        "type": "count",
+      },
+      Object {
+        "fieldName": "added",
+        "name": "sum_added",
+        "type": "longSum",
+      },
+    ],
+    "timestampSpec": Object {
+      "column": "timestamp",
+      "format": "iso",
+    },
+    "transformSpec": Object {
+      "filter": Object {
+        "dimension": "commentLength",
+        "type": "selector",
+        "value": "35",
+      },
+      "transforms": Array [
+        Object {
+          "expression": "concat(\\"channel\\", 'lol')",
+          "name": "channel",
+          "type": "expression",
+        },
+      ],
+    },
+  },
+  "ioConfig": Object {
+    "inputFormat": Object {
+      "flattenSpec": Object {
+        "fields": Array [
+          Object {
+            "expr": "$.cityName",
+            "name": "cityNameAlt",
+            "type": "path",
+          },
+        ],
+      },
+      "type": "json",
+    },
+    "inputSource": Object {
+      "type": "http",
+      "uris": Array [
+        "https://static.imply.io/data/wikipedia.json.gz",
+      ],
+    },
+    "type": "index_parallel",
+  },
+  "tuningConfig": Object {
+    "type": "index_parallel",
+  },
+  "type": "index_parallel",
+}
+`;
@@ -88,17 +88,13 @@ export function updateSchemaWithSample(
   let newSpec = spec;
 
   if (dimensionMode === 'auto-detect') {
-    newSpec = deepSet(newSpec, 'dataSchema.parser.parseSpec.dimensionsSpec.dimensions', []);
+    newSpec = deepSet(newSpec, 'dataSchema.dimensionsSpec.dimensions', []);
   } else {
-    newSpec = deepDelete(newSpec, 'dataSchema.parser.parseSpec.dimensionsSpec.dimensionExclusions');
+    newSpec = deepDelete(newSpec, 'dataSchema.dimensionsSpec.dimensionExclusions');
 
     const dimensions = getDimensionSpecs(headerAndRows, rollup);
     if (dimensions) {
-      newSpec = deepSet(
-        newSpec,
-        'dataSchema.parser.parseSpec.dimensionsSpec.dimensions',
-        dimensions,
-      );
+      newSpec = deepSet(newSpec, 'dataSchema.dimensionsSpec.dimensions', dimensions);
     }
   }
 
@@ -0,0 +1,99 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { downgradeSpec, upgradeSpec } from './ingestion-spec';
+
+describe('ingestion-spec', () => {
+  const oldSpec = {
+    type: 'index_parallel',
+    ioConfig: {
+      type: 'index_parallel',
+      firehose: {
+        type: 'http',
+        uris: ['https://static.imply.io/data/wikipedia.json.gz'],
+      },
+    },
+    tuningConfig: {
+      type: 'index_parallel',
+    },
+    dataSchema: {
+      dataSource: 'wikipedia',
+      granularitySpec: {
+        type: 'uniform',
+        segmentGranularity: 'DAY',
+        queryGranularity: 'HOUR',
+        rollup: true,
+      },
+      parser: {
+        type: 'string',
+        parseSpec: {
+          format: 'json',
+          timestampSpec: {
+            column: 'timestamp',
+            format: 'iso',
+          },
+          dimensionsSpec: {
+            dimensions: ['channel', 'cityName', 'comment'],
+          },
+          flattenSpec: {
+            fields: [
+              {
+                type: 'path',
+                name: 'cityNameAlt',
+                expr: '$.cityName',
+              },
+            ],
+          },
+        },
+      },
+      transformSpec: {
+        transforms: [
+          {
+            type: 'expression',
+            name: 'channel',
+            expression: 'concat("channel", \'lol\')',
+          },
+        ],
+        filter: {
+          type: 'selector',
+          dimension: 'commentLength',
+          value: '35',
+        },
+      },
+      metricsSpec: [
+        {
+          name: 'count',
+          type: 'count',
+        },
+        {
+          name: 'sum_added',
+          type: 'longSum',
+          fieldName: 'added',
+        },
+      ],
+    },
+  };
+
+  it('upgrades', () => {
+    expect(upgradeSpec(oldSpec)).toMatchSnapshot();
+  });
+
+  it('round trips', () => {
+    expect(downgradeSpec(upgradeSpec(oldSpec))).toMatchObject(oldSpec);
+  });
+});
File diff suppressed because it is too large
@@ -16,17 +16,22 @@
  * limitations under the License.
  */
 
+import { deepMove, deepSet } from './object-change';
+
+export type RuleType =
+  | 'loadForever'
+  | 'loadByInterval'
+  | 'loadByPeriod'
+  | 'dropForever'
+  | 'dropByInterval'
+  | 'dropByPeriod'
+  | 'dropBeforeByPeriod'
+  | 'broadcastForever'
+  | 'broadcastByInterval'
+  | 'broadcastByPeriod';
+
 export interface Rule {
-  type:
-    | 'loadForever'
-    | 'loadByInterval'
-    | 'loadByPeriod'
-    | 'dropForever'
-    | 'dropByInterval'
-    | 'dropByPeriod'
-    | 'broadcastForever'
-    | 'broadcastByInterval'
-    | 'broadcastByPeriod';
+  type: RuleType;
   interval?: string;
   period?: string;
   includeFuture?: boolean;
@@ -34,83 +39,97 @@ export interface Rule {
   colocatedDataSources?: string[];
 }
 
-export type LoadType = 'load' | 'drop' | 'broadcast';
-export type TimeType = 'Forever' | 'ByInterval' | 'ByPeriod';
-
 export class RuleUtil {
-  static shouldIncludeFuture(rule: Rule): boolean {
-    if (rule.includeFuture !== false) {
-      return (
-        rule.type === 'loadByPeriod' ||
-        rule.type === 'dropByPeriod' ||
-        rule.type === 'broadcastByPeriod'
-      );
-    }
-    return false;
-  }
+  static TYPES: RuleType[] = [
+    'loadForever',
+    'loadByInterval',
+    'loadByPeriod',
+    'dropForever',
+    'dropByInterval',
+    'dropByPeriod',
+    'dropBeforeByPeriod',
+    'broadcastForever',
+    'broadcastByInterval',
+    'broadcastByPeriod',
+  ];
 
   static ruleToString(rule: Rule): string {
-    return (
-      rule.type +
-      (rule.period ? `(${rule.period})` : '') +
-      (rule.interval ? `(${rule.interval})` : '') +
-      (RuleUtil.shouldIncludeFuture(rule) ? `(includeFuture)` : '')
-    );
+    return [
+      rule.type,
+      rule.period ? `(${rule.period}${rule.includeFuture ? `+future` : ''})` : '',
+      rule.interval ? `(${rule.interval})` : '',
+    ].join('');
   }
 
-  static getLoadType(rule: Rule): LoadType {
-    const m = rule.type.match(/^(load|drop|broadcast)(\w+)$/);
-    if (!m) throw new Error(`unknown rule type: '${rule.type}'`);
-    return m[1] as any;
-  }
-
-  static getTimeType(rule: Rule): TimeType {
-    const m = rule.type.match(/^(load|drop|broadcast)(\w+)$/);
-    if (!m) throw new Error(`unknown rule type: '${rule.type}'`);
-    return m[2] as any;
-  }
-
-  static changeLoadType(rule: Rule, loadType: LoadType): Rule {
-    const newRule = Object.assign({}, rule, { type: loadType + RuleUtil.getTimeType(rule) });
-    if (loadType !== 'load') delete newRule.tieredReplicants;
-    if (loadType !== 'broadcast') delete newRule.colocatedDataSources;
+  static changeRuleType(rule: Rule, type: RuleType): Rule {
+    const newRule = deepSet(rule, 'type', type);
+
+    if (RuleUtil.hasPeriod(newRule)) {
+      if (!newRule.period) newRule.period = 'P1M';
+    } else {
+      delete newRule.period;
+      delete newRule.includeFuture;
+    }
+
+    if (RuleUtil.hasInterval(newRule)) {
+      if (!newRule.interval) newRule.interval = '2010-01-01/2020-01-01';
+    } else {
+      delete newRule.interval;
+    }
+
+    if (RuleUtil.hasTieredReplicants(newRule)) {
+      if (!newRule.tieredReplicants) newRule.tieredReplicants = { _default_tier: 2 };
+    } else {
+      delete newRule.tieredReplicants;
+    }
+
+    if (!RuleUtil.hasColocatedDataSources(newRule)) delete newRule.colocatedDataSources;
+
     return newRule;
   }
 
-  static changeTimeType(rule: Rule, timeType: TimeType): Rule {
-    const newRule = Object.assign({}, rule, { type: RuleUtil.getLoadType(rule) + timeType });
-    if (timeType !== 'ByPeriod') delete newRule.period;
-    if (timeType !== 'ByInterval') delete newRule.interval;
-    return newRule;
+  static hasPeriod(rule: Rule): boolean {
+    return rule.type.endsWith('ByPeriod');
   }
 
   static changePeriod(rule: Rule, period: string): Rule {
-    return Object.assign({}, rule, { period });
+    return deepSet(rule, 'period', period);
+  }
+
+  static hasIncludeFuture(rule: Rule): boolean {
+    return RuleUtil.hasPeriod(rule) && rule.type !== 'dropBeforeByPeriod';
   }
 
   static changeIncludeFuture(rule: Rule, includeFuture: boolean): Rule {
-    return Object.assign({}, rule, { includeFuture });
+    return deepSet(rule, 'includeFuture', includeFuture);
+  }
+
+  static hasInterval(rule: Rule): boolean {
+    return rule.type.endsWith('ByInterval');
   }
 
   static changeInterval(rule: Rule, interval: string): Rule {
-    return Object.assign({}, rule, { interval });
+    return deepSet(rule, 'interval', interval);
  }
 
-  static changeTier(rule: Rule, oldTier: string, newTier: string): Rule {
-    const newRule = Object.assign({}, rule);
-    newRule.tieredReplicants = Object.assign({}, newRule.tieredReplicants);
-    newRule.tieredReplicants[newTier] = newRule.tieredReplicants[oldTier];
-    delete newRule.tieredReplicants[oldTier];
-    return newRule;
+  static hasTieredReplicants(rule: Rule): boolean {
+    return rule.type.startsWith('load');
   }
 
-  static changeTierReplication(rule: Rule, tier: string, replication: number): Rule {
-    const newRule = Object.assign({}, rule);
-    newRule.tieredReplicants = Object.assign({}, newRule.tieredReplicants, { [tier]: replication });
-    return newRule;
+  static renameTieredReplicants(rule: Rule, oldTier: string, newTier: string): Rule {
+    return deepMove(rule, `tieredReplicants.${oldTier}`, `tieredReplicants.${newTier}`);
+  }
+
+  static addTieredReplicant(rule: Rule, tier: string, replication: number): Rule {
+    const newTieredReplicants = deepSet(rule.tieredReplicants || {}, tier, replication);
+    return deepSet(rule, 'tieredReplicants', newTieredReplicants);
+  }
+
+  static hasColocatedDataSources(rule: Rule): boolean {
+    return rule.type.startsWith('broadcast');
   }
 
   static changeColocatedDataSources(rule: Rule, colocatedDataSources: string[]): Rule {
-    return Object.assign({}, rule, { colocatedDataSources });
+    return deepSet(rule, 'colocatedDataSources', colocatedDataSources);
   }
 }
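Collapsing the old load-type and time-type pair into one flat RuleType makes every edit a single changeRuleType call: the helper seeds defaults for fields the new type needs and strips the ones it cannot carry, and the has* predicates drive which controls the rule editor shows. A usage sketch built only from functions in this diff:

let rule: Rule = { type: 'loadForever', tieredReplicants: { _default_tier: 2 } };

rule = RuleUtil.changeRuleType(rule, 'dropBeforeByPeriod');
// -> { type: 'dropBeforeByPeriod', period: 'P1M' }
//    tieredReplicants is dropped (not a load rule), period gets its default,
//    and hasIncludeFuture(rule) is false, so no "Include future" switch is shown.

RuleUtil.ruleToString(rule); // 'dropBeforeByPeriod(P1M)'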
@@ -111,6 +111,16 @@ export function deepDelete<T extends Record<string, any>>(value: T, path: string
   return valueCopy;
 }
 
+export function deepMove<T extends Record<string, any>>(
+  value: T,
+  fromPath: string,
+  toPath: string,
+): T {
+  value = deepSet(value, toPath, deepGet(value, fromPath));
+  value = deepDelete(value, fromPath);
+  return value;
+}
+
 export function deepExtend<T extends Record<string, any>>(target: T, diff: Record<string, any>): T {
   if (typeof target !== 'object') throw new TypeError(`Invalid target`);
   if (typeof diff !== 'object') throw new TypeError(`Invalid diff`);
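deepMove composes the existing copy-on-write deepGet, deepSet, and deepDelete helpers, which is what lets renameTieredReplicants in this commit be a one-liner. For example:

const rule = { type: 'loadForever', tieredReplicants: { hot: 3 } };
const renamed = deepMove(rule, 'tieredReplicants.hot', 'tieredReplicants.cold');
// renamed: { type: 'loadForever', tieredReplicants: { cold: 3 } }
// rule itself is untouched, since deepSet/deepDelete copy on write.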
@@ -22,25 +22,24 @@ import { getDruidErrorMessage, queryDruidRune } from './druid-query';
 import { alphanumericCompare, filterMap, sortWithPrefixSuffix } from './general';
 import {
   DimensionsSpec,
-  getEmptyTimestampSpec,
+  getDummyTimestampSpec,
   getSpecType,
   IngestionSpec,
+  IngestionType,
+  InputFormat,
   IoConfig,
   isColumnTimestampSpec,
-  isIngestSegment,
+  isDruidSource,
   MetricSpec,
-  Parser,
-  ParseSpec,
+  TimestampSpec,
   Transform,
   TransformSpec,
+  upgradeSpec,
 } from './ingestion-spec';
-import { deepGet, deepSet, whitelistKeys } from './object-change';
+import { deepGet, deepSet } from './object-change';
 
-const MS_IN_HOUR = 60 * 60 * 1000;
-
 const SAMPLER_URL = `/druid/indexer/v1/sampler`;
 const BASE_SAMPLER_CONFIG: SamplerConfig = {
-  // skipCache: true,
   numRows: 500,
   timeoutMs: 15000,
 };
@@ -54,25 +53,23 @@ export interface SampleSpec {
 export interface SamplerConfig {
   numRows?: number;
   timeoutMs?: number;
-  cacheKey?: string;
-  skipCache?: boolean;
 }
 
 export interface SampleResponse {
-  cacheKey?: string;
   data: SampleEntry[];
 }
 
+export type CacheRows = Record<string, any>[];
+
 export interface SampleResponseWithExtraInfo extends SampleResponse {
   queryGranularity?: any;
-  timestampSpec?: any;
   rollup?: boolean;
   columns?: Record<string, any>;
   aggregators?: Record<string, any>;
 }
 
 export interface SampleEntry {
-  raw: string;
+  input: Record<string, any>;
   parsed?: Record<string, any>;
   unparseable?: boolean;
   error?: string;
@@ -101,12 +98,35 @@ function dedupe(xs: string[]): string[] {
   });
 }
 
-type SamplerType = 'index' | 'kafka' | 'kinesis';
-
-export function getSamplerType(spec: IngestionSpec): SamplerType {
-  const specType = getSpecType(spec);
-  if (specType === 'kafka' || specType === 'kinesis') return specType;
-  return 'index';
+export function getCacheRowsFromSampleResponse(
+  sampleResponse: SampleResponse,
+  useParsed = false,
+): CacheRows {
+  const key = useParsed ? 'parsed' : 'input';
+  return filterMap(sampleResponse.data, d => d[key]).slice(0, 20);
+}
+
+export function applyCache(sampleSpec: SampleSpec, cacheRows: CacheRows) {
+  if (!cacheRows) return sampleSpec;
+
+  // If this is already an inline spec there is nothing to do
+  if (deepGet(sampleSpec, 'spec.ioConfig.inputSource.type') === 'inline') return sampleSpec;
+
+  // Make the spec into an inline json spec
+  sampleSpec = deepSet(sampleSpec, 'type', 'index');
+  sampleSpec = deepSet(sampleSpec, 'spec.type', 'index');
+  sampleSpec = deepSet(sampleSpec, 'spec.ioConfig.type', 'index');
+  sampleSpec = deepSet(sampleSpec, 'spec.ioConfig.inputSource', {
+    type: 'inline',
+    data: cacheRows.map(r => JSON.stringify(r)).join('\n'),
+  });
+
+  const flattenSpec = deepGet(sampleSpec, 'spec.ioConfig.inputFormat.flattenSpec');
+  const inputFormat: InputFormat = { type: 'json' };
+  if (flattenSpec) inputFormat.flattenSpec = flattenSpec;
+  sampleSpec = deepSet(sampleSpec, 'spec.ioConfig.inputFormat', inputFormat);
+
+  return sampleSpec;
 }
 
 export function headerFromSampleResponse(
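applyCache is the replacement for the removed server-side cacheKey mechanism: given a sample spec and rows captured from an earlier SampleResponse, it rewrites the spec to replay those rows from an inline inputSource instead of re-reading the original source, carrying any flattenSpec over to the new json inputFormat. Roughly (sampleSpec and sampleResponse are assumed to be in scope):

const cacheRows = getCacheRowsFromSampleResponse(sampleResponse); // first 20 'input' rows
const cachedSpec = applyCache(sampleSpec, cacheRows);
// cachedSpec.spec.ioConfig.inputSource is now:
//   { type: 'inline', data: '{"a":1}\n{"a":2}\n...' }  (newline-delimited JSON)
// so subsequent sampler steps re-parse identical rows without touching the source.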
@@ -140,7 +160,7 @@ export function headerAndRowsFromSampleResponse(
   };
 }
 
-export async function getOverlordModules(): Promise<string[]> {
+export async function getProxyOverlordModules(): Promise<string[]> {
   let statusResp: any;
   try {
     statusResp = await axios.get(`/proxy/overlord/status`);
@@ -155,6 +175,8 @@ export async function postToSampler(
   sampleSpec: SampleSpec,
   forStr: string,
 ): Promise<SampleResponse> {
+  sampleSpec = fixSamplerTypes(sampleSpec);
+
   let sampleResp: any;
   try {
     sampleResp = await axios.post(`${SAMPLER_URL}?for=${forStr}`, sampleSpec);
@@ -169,77 +191,72 @@ export type SampleStrategy = 'start' | 'end';
 
 function makeSamplerIoConfig(
   ioConfig: IoConfig,
-  samplerType: SamplerType,
+  specType: IngestionType,
   sampleStrategy: SampleStrategy,
 ): IoConfig {
-  ioConfig = deepSet(ioConfig || {}, 'type', samplerType);
-  if (samplerType === 'kafka') {
+  ioConfig = deepSet(ioConfig || {}, 'type', specType);
+  if (specType === 'kafka') {
     ioConfig = deepSet(ioConfig, 'useEarliestOffset', sampleStrategy === 'start');
-  } else if (samplerType === 'kinesis') {
+  } else if (specType === 'kinesis') {
     ioConfig = deepSet(ioConfig, 'useEarliestSequenceNumber', sampleStrategy === 'start');
   }
   return ioConfig;
 }
 
 /**
- * This function scopes down the interval of an ingestSegment firehose for the data sampler
- * this is needed because the ingestSegment firehose gets the interval you are sampling over,
- * looks up the corresponding segments and segment locations from metadata store, downloads
- * every segment from deep storage to disk, and then maps all the segments into memory;
- * and this happens in the constructor before the timer thread is even created meaning the sampler
- * will time out on a larger interval.
- * This is essentially a workaround for https://github.com/apache/incubator-druid/issues/8448
- * @param ioConfig The IO Config to scope down the interval of
+ This is a hack to deal with the fact that the sampler can not deal with the index_parallel type
 */
-export async function scopeDownIngestSegmentFirehoseIntervalIfNeeded(
-  ioConfig: IoConfig,
-): Promise<IoConfig> {
-  if (deepGet(ioConfig, 'firehose.type') !== 'ingestSegment') return ioConfig;
-  const interval = deepGet(ioConfig, 'firehose.interval');
-  const intervalParts = interval.split('/');
-  const start = new Date(intervalParts[0]);
-  if (isNaN(start.valueOf())) throw new Error(`could not decode interval start`);
-  const end = new Date(intervalParts[1]);
-  if (isNaN(end.valueOf())) throw new Error(`could not decode interval end`);
-
-  // Less than or equal to 1 hour so there is no need to adjust intervals
-  if (Math.abs(end.valueOf() - start.valueOf()) <= MS_IN_HOUR) return ioConfig;
+function fixSamplerTypes(sampleSpec: SampleSpec): SampleSpec {
+  let samplerType: string = getSpecType(sampleSpec.spec);
+  if (samplerType === 'index_parallel') {
+    samplerType = 'index';
+  }
 
-  const dataSourceMetadataResponse = await queryDruidRune({
-    queryType: 'dataSourceMetadata',
-    dataSource: deepGet(ioConfig, 'firehose.dataSource'),
-  });
+  sampleSpec = deepSet(sampleSpec, 'type', samplerType);
+  sampleSpec = deepSet(sampleSpec, 'spec.type', samplerType);
+  sampleSpec = deepSet(sampleSpec, 'spec.ioConfig.type', samplerType);
+  sampleSpec = deepSet(sampleSpec, 'spec.tuningConfig.type', samplerType);
+  return sampleSpec;
+}
 
-  const maxIngestedEventTime = new Date(
-    deepGet(dataSourceMetadataResponse, '0.result.maxIngestedEventTime'),
-  );
+function cleanupQueryGranularity(queryGranularity: any): any {
+  let queryGranularityType = deepGet(queryGranularity, 'type');
+  if (typeof queryGranularityType !== 'string') return queryGranularity;
+  queryGranularityType = queryGranularityType.toUpperCase();
 
-  // If invalid maxIngestedEventTime do nothing
-  if (isNaN(maxIngestedEventTime.valueOf())) return ioConfig;
-
-  // If maxIngestedEventTime is before the start of the interval do nothing
-  if (maxIngestedEventTime < start) return ioConfig;
+  const knownGranularity = [
+    'NONE',
+    'SECOND',
+    'MINUTE',
+    'HOUR',
+    'DAY',
+    'WEEK',
+    'MONTH',
+    'YEAR',
+  ].includes(queryGranularityType);
 
-  const newEnd = maxIngestedEventTime < end ? maxIngestedEventTime : end;
-  const newStart = new Date(newEnd.valueOf() - MS_IN_HOUR); // Set start to 1 hour ago
-
-  return deepSet(
-    ioConfig,
-    'firehose.interval',
-    `${newStart.toISOString()}/${newEnd.toISOString()}`,
-  );
+  return knownGranularity ? queryGranularityType : queryGranularity;
 }
 
 export async function sampleForConnect(
   spec: IngestionSpec,
   sampleStrategy: SampleStrategy,
 ): Promise<SampleResponseWithExtraInfo> {
-  const samplerType = getSamplerType(spec);
-  const ioConfig: IoConfig = await scopeDownIngestSegmentFirehoseIntervalIfNeeded(
-    makeSamplerIoConfig(deepGet(spec, 'ioConfig'), samplerType, sampleStrategy),
+  const samplerType = getSpecType(spec);
+  let ioConfig: IoConfig = makeSamplerIoConfig(
+    deepGet(spec, 'ioConfig'),
+    samplerType,
+    sampleStrategy,
   );
 
-  const ingestSegmentMode = isIngestSegment(spec);
+  const reingestMode = isDruidSource(spec);
+  if (!reingestMode) {
+    ioConfig = deepSet(ioConfig, 'inputFormat', {
+      type: 'regex',
+      pattern: '(.*)',
+      columns: ['raw'],
+    });
+  }
 
   const sampleSpec: SampleSpec = {
     type: samplerType,
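fixSamplerTypes works around the sampler endpoint not accepting index_parallel: every outgoing sample spec is rewritten to plain index at all four type positions before it is POSTed. cleanupQueryGranularity similarly normalizes the granularity that segmentMetadata returns into the uppercase string form the spec editor expects. A sketch of its behavior:

cleanupQueryGranularity({ type: 'hour' }); // -> 'HOUR' (known granularity, string form)
cleanupQueryGranularity({ type: 'p15m' }); // -> returned unchanged (not in the known list)
cleanupQueryGranularity('HOUR');           // -> returned unchanged (no 'type' property)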
@@ -248,16 +265,8 @@
       ioConfig,
       dataSchema: {
         dataSource: 'sample',
-        parser: {
-          type: 'string',
-          parseSpec: {
-            format: 'regex',
-            pattern: '(.*)',
-            columns: ['a'],
-            dimensionsSpec: {},
-            timestampSpec: getEmptyTimestampSpec(),
-          },
-        },
+        timestampSpec: getDummyTimestampSpec(),
+        dimensionsSpec: {},
       },
     } as any,
     samplerConfig: BASE_SAMPLER_CONFIG,
@@ -267,11 +276,11 @@
 
   if (!samplerResponse.data.length) return samplerResponse;
 
-  if (ingestSegmentMode) {
+  if (reingestMode) {
     const segmentMetadataResponse = await queryDruidRune({
       queryType: 'segmentMetadata',
-      dataSource: deepGet(ioConfig, 'firehose.dataSource'),
-      intervals: [deepGet(ioConfig, 'firehose.interval')],
+      dataSource: deepGet(ioConfig, 'inputSource.dataSource'),
+      intervals: [deepGet(ioConfig, 'inputSource.interval')],
       merge: true,
       lenientAggregatorMerge: true,
       analysisTypes: ['timestampSpec', 'queryGranularity', 'aggregators', 'rollup'],
@@ -279,8 +288,9 @@
 
     if (Array.isArray(segmentMetadataResponse) && segmentMetadataResponse.length === 1) {
       const segmentMetadataResponse0 = segmentMetadataResponse[0];
-      samplerResponse.queryGranularity = segmentMetadataResponse0.queryGranularity;
-      samplerResponse.timestampSpec = segmentMetadataResponse0.timestampSpec;
+      samplerResponse.queryGranularity = cleanupQueryGranularity(
+        segmentMetadataResponse0.queryGranularity,
+      );
       samplerResponse.rollup = segmentMetadataResponse0.rollup;
       samplerResponse.columns = segmentMetadataResponse0.columns;
       samplerResponse.aggregators = segmentMetadataResponse0.aggregators;
@@ -295,35 +305,26 @@
 export async function sampleForParser(
   spec: IngestionSpec,
   sampleStrategy: SampleStrategy,
-  cacheKey: string | undefined,
 ): Promise<SampleResponse> {
-  const samplerType = getSamplerType(spec);
-  const ioConfig: IoConfig = await scopeDownIngestSegmentFirehoseIntervalIfNeeded(
-    makeSamplerIoConfig(deepGet(spec, 'ioConfig'), samplerType, sampleStrategy),
+  const samplerType = getSpecType(spec);
+  const ioConfig: IoConfig = makeSamplerIoConfig(
+    deepGet(spec, 'ioConfig'),
+    samplerType,
+    sampleStrategy,
   );
-  const parser: Parser = deepGet(spec, 'dataSchema.parser') || {};
 
   const sampleSpec: SampleSpec = {
     type: samplerType,
     spec: {
       type: samplerType,
-      ioConfig: deepSet(ioConfig, 'type', samplerType),
+      ioConfig,
       dataSchema: {
         dataSource: 'sample',
-        parser: {
-          type: parser.type,
-          parseSpec: (parser.parseSpec
-            ? Object.assign({}, parser.parseSpec, {
-                dimensionsSpec: {},
-                timestampSpec: getEmptyTimestampSpec(),
-              })
-            : undefined) as any,
-        },
+        timestampSpec: getDummyTimestampSpec(),
+        dimensionsSpec: {},
       },
     },
-    samplerConfig: Object.assign({}, BASE_SAMPLER_CONFIG, {
-      cacheKey,
-    }),
+    samplerConfig: BASE_SAMPLER_CONFIG,
   };
 
   return postToSampler(sampleSpec, 'parser');
@@ -331,17 +332,10 @@
 
 export async function sampleForTimestamp(
   spec: IngestionSpec,
-  sampleStrategy: SampleStrategy,
-  cacheKey: string | undefined,
+  cacheRows: CacheRows,
 ): Promise<SampleResponse> {
-  const samplerType = getSamplerType(spec);
-  const ioConfig: IoConfig = await scopeDownIngestSegmentFirehoseIntervalIfNeeded(
-    makeSamplerIoConfig(deepGet(spec, 'ioConfig'), samplerType, sampleStrategy),
-  );
-  const parser: Parser = deepGet(spec, 'dataSchema.parser') || {};
-  const parseSpec: ParseSpec = deepGet(spec, 'dataSchema.parser.parseSpec') || {};
-  const timestampSpec: ParseSpec =
-    deepGet(spec, 'dataSchema.parser.parseSpec.timestampSpec') || getEmptyTimestampSpec();
+  const samplerType = getSpecType(spec);
+  const timestampSpec: TimestampSpec = deepGet(spec, 'dataSchema.timestampSpec');
   const columnTimestampSpec = isColumnTimestampSpec(timestampSpec);
 
   // First do a query with a static timestamp spec
@@ -349,26 +343,20 @@
     type: samplerType,
     spec: {
       type: samplerType,
-      ioConfig: deepSet(ioConfig, 'type', samplerType),
+      ioConfig: deepGet(spec, 'ioConfig'),
       dataSchema: {
         dataSource: 'sample',
-        parser: {
+        dimensionsSpec: {},
-          type: parser.type,
|
timestampSpec: columnTimestampSpec ? getDummyTimestampSpec() : timestampSpec,
|
||||||
parseSpec: (parser.parseSpec
|
|
||||||
? Object.assign({}, parseSpec, {
|
|
||||||
dimensionsSpec: {},
|
|
||||||
timestampSpec: columnTimestampSpec ? getEmptyTimestampSpec() : timestampSpec,
|
|
||||||
})
|
|
||||||
: undefined) as any,
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
samplerConfig: Object.assign({}, BASE_SAMPLER_CONFIG, {
|
samplerConfig: BASE_SAMPLER_CONFIG,
|
||||||
cacheKey,
|
|
||||||
}),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const sampleColumns = await postToSampler(sampleSpecColumns, 'timestamp-columns');
|
const sampleColumns = await postToSampler(
|
||||||
|
applyCache(sampleSpecColumns, cacheRows),
|
||||||
|
'timestamp-columns',
|
||||||
|
);
|
||||||
|
|
||||||
// If we are not parsing a column then there is nothing left to do
|
// If we are not parsing a column then there is nothing left to do
|
||||||
if (!columnTimestampSpec) return sampleColumns;
|
if (!columnTimestampSpec) return sampleColumns;
|
||||||
|
@ -379,28 +367,19 @@ export async function sampleForTimestamp(
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
spec: {
|
spec: {
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
ioConfig: deepSet(ioConfig, 'type', samplerType),
|
ioConfig: deepGet(spec, 'ioConfig'),
|
||||||
dataSchema: {
|
dataSchema: {
|
||||||
dataSource: 'sample',
|
dataSource: 'sample',
|
||||||
parser: {
|
dimensionsSpec: {},
|
||||||
type: parser.type,
|
timestampSpec,
|
||||||
parseSpec: Object.assign({}, parseSpec, {
|
|
||||||
dimensionsSpec: {},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
samplerConfig: Object.assign({}, BASE_SAMPLER_CONFIG, {
|
samplerConfig: BASE_SAMPLER_CONFIG,
|
||||||
cacheKey: sampleColumns.cacheKey || cacheKey,
|
|
||||||
}),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const sampleTime = await postToSampler(sampleSpec, 'timestamp-time');
|
const sampleTime = await postToSampler(applyCache(sampleSpec, cacheRows), 'timestamp-time');
|
||||||
|
|
||||||
if (
|
if (sampleTime.data.length !== sampleColumns.data.length) {
|
||||||
sampleTime.cacheKey !== sampleColumns.cacheKey ||
|
|
||||||
sampleTime.data.length !== sampleColumns.data.length
|
|
||||||
) {
|
|
||||||
// If the two responses did not come from the same cache (or for some reason have different lengths) then
|
// If the two responses did not come from the same cache (or for some reason have different lengths) then
|
||||||
// just return the one with the parsed time column.
|
// just return the one with the parsed time column.
|
||||||
return sampleTime;
|
return sampleTime;
|
||||||
|
@ -420,16 +399,11 @@ export async function sampleForTimestamp(
|
||||||
|
|
||||||
export async function sampleForTransform(
|
export async function sampleForTransform(
|
||||||
spec: IngestionSpec,
|
spec: IngestionSpec,
|
||||||
sampleStrategy: SampleStrategy,
|
cacheRows: CacheRows,
|
||||||
cacheKey: string | undefined,
|
|
||||||
): Promise<SampleResponse> {
|
): Promise<SampleResponse> {
|
||||||
const samplerType = getSamplerType(spec);
|
const samplerType = getSpecType(spec);
|
||||||
const ioConfig: IoConfig = await scopeDownIngestSegmentFirehoseIntervalIfNeeded(
|
const inputFormatColumns: string[] = deepGet(spec, 'ioConfig.inputFormat.columns') || [];
|
||||||
makeSamplerIoConfig(deepGet(spec, 'ioConfig'), samplerType, sampleStrategy),
|
const timestampSpec: TimestampSpec = deepGet(spec, 'dataSchema.timestampSpec');
|
||||||
);
|
|
||||||
const parser: Parser = deepGet(spec, 'dataSchema.parser') || {};
|
|
||||||
const parseSpec: ParseSpec = deepGet(spec, 'dataSchema.parser.parseSpec') || {};
|
|
||||||
const parserColumns: string[] = deepGet(parseSpec, 'columns') || [];
|
|
||||||
const transforms: Transform[] = deepGet(spec, 'dataSchema.transformSpec.transforms') || [];
|
const transforms: Transform[] = deepGet(spec, 'dataSchema.transformSpec.transforms') || [];
|
||||||
|
|
||||||
// Extra step to simulate auto detecting dimension with transforms
|
// Extra step to simulate auto detecting dimension with transforms
|
||||||
|
@ -439,29 +413,26 @@ export async function sampleForTransform(
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
spec: {
|
spec: {
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
ioConfig: deepSet(ioConfig, 'type', samplerType),
|
ioConfig: deepGet(spec, 'ioConfig'),
|
||||||
dataSchema: {
|
dataSchema: {
|
||||||
dataSource: 'sample',
|
dataSource: 'sample',
|
||||||
parser: {
|
timestampSpec,
|
||||||
type: parser.type,
|
dimensionsSpec: {},
|
||||||
parseSpec: Object.assign({}, parseSpec, {
|
|
||||||
dimensionsSpec: {},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
samplerConfig: Object.assign({}, BASE_SAMPLER_CONFIG, {
|
samplerConfig: BASE_SAMPLER_CONFIG,
|
||||||
cacheKey,
|
|
||||||
}),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const sampleResponseHack = await postToSampler(sampleSpecHack, 'transform-pre');
|
const sampleResponseHack = await postToSampler(
|
||||||
|
applyCache(sampleSpecHack, cacheRows),
|
||||||
|
'transform-pre',
|
||||||
|
);
|
||||||
|
|
||||||
specialDimensionSpec.dimensions = dedupe(
|
specialDimensionSpec.dimensions = dedupe(
|
||||||
headerFromSampleResponse(
|
headerFromSampleResponse(
|
||||||
sampleResponseHack,
|
sampleResponseHack,
|
||||||
'__time',
|
'__time',
|
||||||
['__time'].concat(parserColumns),
|
['__time'].concat(inputFormatColumns),
|
||||||
).concat(transforms.map(t => t.name)),
|
).concat(transforms.map(t => t.name)),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -470,40 +441,29 @@ export async function sampleForTransform(
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
spec: {
|
spec: {
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
ioConfig: deepSet(ioConfig, 'type', samplerType),
|
ioConfig: deepGet(spec, 'ioConfig'),
|
||||||
dataSchema: {
|
dataSchema: {
|
||||||
dataSource: 'sample',
|
dataSource: 'sample',
|
||||||
parser: {
|
timestampSpec,
|
||||||
type: parser.type,
|
dimensionsSpec: specialDimensionSpec, // Hack Hack Hack
|
||||||
parseSpec: Object.assign({}, parseSpec, {
|
|
||||||
dimensionsSpec: specialDimensionSpec, // Hack Hack Hack
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
transformSpec: {
|
transformSpec: {
|
||||||
transforms,
|
transforms,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
samplerConfig: Object.assign({}, BASE_SAMPLER_CONFIG, {
|
samplerConfig: BASE_SAMPLER_CONFIG,
|
||||||
cacheKey,
|
|
||||||
}),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
return postToSampler(sampleSpec, 'transform');
|
return postToSampler(applyCache(sampleSpec, cacheRows), 'transform');
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function sampleForFilter(
|
export async function sampleForFilter(
|
||||||
spec: IngestionSpec,
|
spec: IngestionSpec,
|
||||||
sampleStrategy: SampleStrategy,
|
cacheRows: CacheRows,
|
||||||
cacheKey: string | undefined,
|
|
||||||
): Promise<SampleResponse> {
|
): Promise<SampleResponse> {
|
||||||
const samplerType = getSamplerType(spec);
|
const samplerType = getSpecType(spec);
|
||||||
const ioConfig: IoConfig = await scopeDownIngestSegmentFirehoseIntervalIfNeeded(
|
const inputFormatColumns: string[] = deepGet(spec, 'ioConfig.inputFormat.columns') || [];
|
||||||
makeSamplerIoConfig(deepGet(spec, 'ioConfig'), samplerType, sampleStrategy),
|
const timestampSpec: TimestampSpec = deepGet(spec, 'dataSchema.timestampSpec');
|
||||||
);
|
|
||||||
const parser: Parser = deepGet(spec, 'dataSchema.parser') || {};
|
|
||||||
const parseSpec: ParseSpec = deepGet(spec, 'dataSchema.parser.parseSpec') || {};
|
|
||||||
const parserColumns: string[] = deepGet(parser, 'columns') || [];
|
|
||||||
const transforms: Transform[] = deepGet(spec, 'dataSchema.transformSpec.transforms') || [];
|
const transforms: Transform[] = deepGet(spec, 'dataSchema.transformSpec.transforms') || [];
|
||||||
const filter: any = deepGet(spec, 'dataSchema.transformSpec.filter');
|
const filter: any = deepGet(spec, 'dataSchema.transformSpec.filter');
|
||||||
|
|
||||||
|
@ -514,29 +474,26 @@ export async function sampleForFilter(
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
spec: {
|
spec: {
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
ioConfig: deepSet(ioConfig, 'type', samplerType),
|
ioConfig: deepGet(spec, 'ioConfig'),
|
||||||
dataSchema: {
|
dataSchema: {
|
||||||
dataSource: 'sample',
|
dataSource: 'sample',
|
||||||
parser: {
|
timestampSpec,
|
||||||
type: parser.type,
|
dimensionsSpec: {},
|
||||||
parseSpec: Object.assign({}, parseSpec, {
|
|
||||||
dimensionsSpec: {},
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
samplerConfig: Object.assign({}, BASE_SAMPLER_CONFIG, {
|
samplerConfig: BASE_SAMPLER_CONFIG,
|
||||||
cacheKey,
|
|
||||||
}),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const sampleResponseHack = await postToSampler(sampleSpecHack, 'filter-pre');
|
const sampleResponseHack = await postToSampler(
|
||||||
|
applyCache(sampleSpecHack, cacheRows),
|
||||||
|
'filter-pre',
|
||||||
|
);
|
||||||
|
|
||||||
specialDimensionSpec.dimensions = dedupe(
|
specialDimensionSpec.dimensions = dedupe(
|
||||||
headerFromSampleResponse(
|
headerFromSampleResponse(
|
||||||
sampleResponseHack,
|
sampleResponseHack,
|
||||||
'__time',
|
'__time',
|
||||||
['__time'].concat(parserColumns),
|
['__time'].concat(inputFormatColumns),
|
||||||
).concat(transforms.map(t => t.name)),
|
).concat(transforms.map(t => t.name)),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -545,41 +502,32 @@ export async function sampleForFilter(
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
spec: {
|
spec: {
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
ioConfig: deepSet(ioConfig, 'type', samplerType),
|
ioConfig: deepGet(spec, 'ioConfig'),
|
||||||
dataSchema: {
|
dataSchema: {
|
||||||
dataSource: 'sample',
|
dataSource: 'sample',
|
||||||
parser: {
|
timestampSpec,
|
||||||
type: parser.type,
|
dimensionsSpec: specialDimensionSpec, // Hack Hack Hack
|
||||||
parseSpec: Object.assign({}, parseSpec, {
|
|
||||||
dimensionsSpec: specialDimensionSpec, // Hack Hack Hack
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
transformSpec: {
|
transformSpec: {
|
||||||
transforms,
|
transforms,
|
||||||
filter,
|
filter,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
samplerConfig: Object.assign({}, BASE_SAMPLER_CONFIG, {
|
samplerConfig: BASE_SAMPLER_CONFIG,
|
||||||
cacheKey,
|
|
||||||
}),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
return postToSampler(sampleSpec, 'filter');
|
return postToSampler(applyCache(sampleSpec, cacheRows), 'filter');
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function sampleForSchema(
|
export async function sampleForSchema(
|
||||||
spec: IngestionSpec,
|
spec: IngestionSpec,
|
||||||
sampleStrategy: SampleStrategy,
|
cacheRows: CacheRows,
|
||||||
cacheKey: string | undefined,
|
|
||||||
): Promise<SampleResponse> {
|
): Promise<SampleResponse> {
|
||||||
const samplerType = getSamplerType(spec);
|
const samplerType = getSpecType(spec);
|
||||||
const ioConfig: IoConfig = await scopeDownIngestSegmentFirehoseIntervalIfNeeded(
|
const timestampSpec: TimestampSpec = deepGet(spec, 'dataSchema.timestampSpec');
|
||||||
makeSamplerIoConfig(deepGet(spec, 'ioConfig'), samplerType, sampleStrategy),
|
|
||||||
);
|
|
||||||
const parser: Parser = deepGet(spec, 'dataSchema.parser') || {};
|
|
||||||
const transformSpec: TransformSpec =
|
const transformSpec: TransformSpec =
|
||||||
deepGet(spec, 'dataSchema.transformSpec') || ({} as TransformSpec);
|
deepGet(spec, 'dataSchema.transformSpec') || ({} as TransformSpec);
|
||||||
|
const dimensionsSpec: DimensionsSpec = deepGet(spec, 'dataSchema.dimensionsSpec');
|
||||||
const metricsSpec: MetricSpec[] = deepGet(spec, 'dataSchema.metricsSpec') || [];
|
const metricsSpec: MetricSpec[] = deepGet(spec, 'dataSchema.metricsSpec') || [];
|
||||||
const queryGranularity: string =
|
const queryGranularity: string =
|
||||||
deepGet(spec, 'dataSchema.granularitySpec.queryGranularity') || 'NONE';
|
deepGet(spec, 'dataSchema.granularitySpec.queryGranularity') || 'NONE';
|
||||||
|
@ -588,56 +536,49 @@ export async function sampleForSchema(
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
spec: {
|
spec: {
|
||||||
type: samplerType,
|
type: samplerType,
|
||||||
ioConfig: deepSet(ioConfig, 'type', samplerType),
|
ioConfig: deepGet(spec, 'ioConfig'),
|
||||||
dataSchema: {
|
dataSchema: {
|
||||||
dataSource: 'sample',
|
dataSource: 'sample',
|
||||||
parser: whitelistKeys(parser, ['type', 'parseSpec']) as Parser,
|
timestampSpec,
|
||||||
transformSpec,
|
transformSpec,
|
||||||
metricsSpec,
|
|
||||||
granularitySpec: {
|
granularitySpec: {
|
||||||
queryGranularity,
|
queryGranularity,
|
||||||
},
|
},
|
||||||
|
dimensionsSpec,
|
||||||
|
metricsSpec,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
samplerConfig: Object.assign({}, BASE_SAMPLER_CONFIG, {
|
samplerConfig: BASE_SAMPLER_CONFIG,
|
||||||
cacheKey,
|
|
||||||
}),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
return postToSampler(sampleSpec, 'schema');
|
return postToSampler(applyCache(sampleSpec, cacheRows), 'schema');
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function sampleForExampleManifests(
|
export async function sampleForExampleManifests(
|
||||||
exampleManifestUrl: string,
|
exampleManifestUrl: string,
|
||||||
): Promise<ExampleManifest[]> {
|
): Promise<ExampleManifest[]> {
|
||||||
const sampleSpec: SampleSpec = {
|
const exampleSpec: SampleSpec = {
|
||||||
type: 'index',
|
type: 'index_parallel',
|
||||||
spec: {
|
spec: {
|
||||||
type: 'index',
|
type: 'index_parallel',
|
||||||
ioConfig: {
|
ioConfig: {
|
||||||
type: 'index',
|
type: 'index_parallel',
|
||||||
firehose: { type: 'http', uris: [exampleManifestUrl] },
|
inputSource: { type: 'http', uris: [exampleManifestUrl] },
|
||||||
|
inputFormat: { type: 'tsv', findColumnsFromHeader: true },
|
||||||
},
|
},
|
||||||
dataSchema: {
|
dataSchema: {
|
||||||
dataSource: 'sample',
|
dataSource: 'sample',
|
||||||
parser: {
|
timestampSpec: {
|
||||||
type: 'string',
|
column: 'timestamp',
|
||||||
parseSpec: {
|
missingValue: '2010-01-01T00:00:00Z',
|
||||||
format: 'tsv',
|
|
||||||
timestampSpec: {
|
|
||||||
column: 'timestamp',
|
|
||||||
missingValue: '2010-01-01T00:00:00Z',
|
|
||||||
},
|
|
||||||
dimensionsSpec: {},
|
|
||||||
hasHeaderRow: true,
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
|
dimensionsSpec: {},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
samplerConfig: { numRows: 50, timeoutMs: 10000, skipCache: true },
|
samplerConfig: { numRows: 50, timeoutMs: 10000 },
|
||||||
};
|
};
|
||||||
|
|
||||||
const exampleData = await postToSampler(sampleSpec, 'example-manifest');
|
const exampleData = await postToSampler(exampleSpec, 'example-manifest');
|
||||||
|
|
||||||
return filterMap(exampleData.data, datum => {
|
return filterMap(exampleData.data, datum => {
|
||||||
const parsed = datum.parsed;
|
const parsed = datum.parsed;
|
||||||
|
@ -658,7 +599,7 @@ export async function sampleForExampleManifests(
|
||||||
return {
|
return {
|
||||||
name: parsed.name,
|
name: parsed.name,
|
||||||
description: parsed.description,
|
description: parsed.description,
|
||||||
spec,
|
spec: upgradeSpec(spec),
|
||||||
};
|
};
|
||||||
} else {
|
} else {
|
||||||
return;
|
return;
|
||||||
|
|
|
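Note on the sampler hunks above: every function follows the same conversion, replacing the legacy firehose/parser nesting with the new inputSource/inputFormat pair and hoisting timestampSpec and dimensionsSpec to the top level of dataSchema. A minimal TypeScript sketch of the two shapes, typed loosely as any since the console's full IngestionSpec interface is not reproduced in this diff; the values follow the wikipedia example used in the tests below:

// Legacy shape: where the data lives and how to parse it are nested under
// ioConfig.firehose and dataSchema.parser.parseSpec.
const legacySpec: any = {
  type: 'index_parallel',
  ioConfig: {
    type: 'index_parallel',
    firehose: { type: 'http', uris: ['https://static.imply.io/data/wikipedia.json.gz'] },
  },
  dataSchema: {
    dataSource: 'wikipedia',
    parser: {
      type: 'string',
      parseSpec: {
        format: 'json',
        timestampSpec: { column: 'timestamp', format: 'iso' },
        dimensionsSpec: {},
      },
    },
  },
};

// New shape: inputSource says where the data is, inputFormat says how to
// decode it, and timestampSpec/dimensionsSpec sit directly on dataSchema.
const newSpec: any = {
  type: 'index_parallel',
  ioConfig: {
    type: 'index_parallel',
    inputSource: { type: 'http', uris: ['https://static.imply.io/data/wikipedia.json.gz'] },
    inputFormat: { type: 'json' },
  },
  dataSchema: {
    dataSource: 'wikipedia',
    timestampSpec: { column: 'timestamp', format: 'iso' },
    dimensionsSpec: {},
  },
};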
@@ -26,10 +26,10 @@ export function computeFlattenPathsForData(
   exprType: ExprType,
   arrayHandling: ArrayHandling,
 ): FlattenField[] {
-  return computeFlattenExprsForData(data, exprType, arrayHandling).map((expr, i) => {
+  return computeFlattenExprsForData(data, exprType, arrayHandling).map(expr => {
     return {
+      name: expr.replace(/^\$?\./, ''),
       type: exprType,
-      name: `expr_${i}`,
       expr,
     };
   });
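With the computeFlattenPathsForData change above, a flatten field's name is now derived from its JSONPath expression instead of a positional expr_0, expr_1 label, so repeated auto-detection yields stable column names. A small sketch of the naming rule, reusing the regex from the diff (the helper name here is made up for illustration):

// Strip a leading "$." or "." from a JSONPath expression to form the column name.
function flattenNameFromExpr(expr: string): string {
  return expr.replace(/^\$?\./, '');
}

flattenNameFromExpr('$.channel'); // => 'channel'
flattenNameFromExpr('$.user.isRobot'); // => 'user.isRobot'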
@@ -25,27 +25,20 @@ import {
   updateSchemaWithSample,
 } from './druid-type';
 import { IngestionSpec } from './ingestion-spec';
-import {
-  getSamplerType,
-  headerFromSampleResponse,
-  sampleForConnect,
-  sampleForExampleManifests,
-  sampleForFilter,
-  sampleForParser,
-  sampleForSchema,
-  sampleForTimestamp,
-  sampleForTransform,
-} from './sampler';
+import { applyCache, headerFromSampleResponse } from './sampler';
 
 describe('test-utils', () => {
-  const ingestionSpec = {
+  const ingestionSpec: IngestionSpec = {
     type: 'index_parallel',
     ioConfig: {
       type: 'index_parallel',
-      firehose: {
+      inputSource: {
         type: 'http',
         uris: ['https://static.imply.io/data/wikipedia.json.gz'],
       },
+      inputFormat: {
+        type: 'json',
+      },
     },
     tuningConfig: {
       type: 'index_parallel',
@@ -57,71 +50,122 @@ describe('test-utils', () => {
       segmentGranularity: 'DAY',
       queryGranularity: 'HOUR',
     },
-    parser: {
-      type: 'string',
-      parseSpec: {
-        format: 'json',
-        timestampSpec: {
-          column: 'timestamp',
-          format: 'iso',
-        },
-        dimensionsSpec: {},
-      },
-    },
+    timestampSpec: {
+      column: 'timestamp',
+      format: 'iso',
+    },
+    dimensionsSpec: {},
   },
 };
 
-  it('spec-utils getSamplerType', () => {
-    expect(getSamplerType(ingestionSpec as IngestionSpec)).toMatchInlineSnapshot(`"index"`);
-  });
+  // const cacheRows: CacheRows = [{ make: 'Honda', model: 'Civic' }, { make: 'BMW', model: 'M3' }];
 
   it('spec-utils headerFromSampleResponse', () => {
-    expect(headerFromSampleResponse({ cacheKey: 'abc123', data: [] })).toMatchInlineSnapshot(
-      `Array []`,
-    );
+    expect(headerFromSampleResponse({ data: [{ input: { a: 1 }, parsed: { a: 1 } }] }))
+      .toMatchInlineSnapshot(`
+      Array [
+        "a",
+      ]
+    `);
   });
 
-  it('spec-utils sampleForParser', () => {
-    expect(
-      sampleForParser(ingestionSpec as IngestionSpec, 'start', 'abc123'),
-    ).toMatchInlineSnapshot(`Promise {}`);
-  });
-  it('spec-utils SampleSpec', () => {
-    expect(sampleForConnect(ingestionSpec as IngestionSpec, 'start')).toMatchInlineSnapshot(
-      `Promise {}`,
-    );
-  });
-  it('spec-utils sampleForTimestamp', () => {
-    expect(
-      sampleForTimestamp(ingestionSpec as IngestionSpec, 'start', 'abc123'),
-    ).toMatchInlineSnapshot(`Promise {}`);
-  });
-  it('spec-utils sampleForTransform', () => {
-    expect(
-      sampleForTransform(ingestionSpec as IngestionSpec, 'start', 'abc123'),
-    ).toMatchInlineSnapshot(`Promise {}`);
-  });
-  it('spec-utils sampleForFilter', () => {
-    expect(
-      sampleForFilter(ingestionSpec as IngestionSpec, 'start', 'abc123'),
-    ).toMatchInlineSnapshot(`Promise {}`);
-  });
-  it('spec-utils sampleForSchema', () => {
-    expect(
-      sampleForSchema(ingestionSpec as IngestionSpec, 'start', 'abc123'),
-    ).toMatchInlineSnapshot(`Promise {}`);
-  });
-  it('spec-utils sampleForExampleManifests', () => {
-    expect(sampleForExampleManifests('abc123')).toMatchInlineSnapshot(`Promise {}`);
-  });
+  it('spec-utils applyCache', () => {
+    expect(
+      applyCache(
+        {
+          type: 'index_parallel',
+          spec: ingestionSpec,
+          samplerConfig: {
+            numRows: 500,
+            timeoutMs: 15000,
+          },
+        },
+        [{ make: 'Honda', model: 'Accord' }, { make: 'Toyota', model: 'Prius' }],
+      ),
+    ).toMatchInlineSnapshot(`
+      Object {
+        "samplerConfig": Object {
+          "numRows": 500,
+          "timeoutMs": 15000,
+        },
+        "spec": Object {
+          "dataSchema": Object {
+            "dataSource": "wikipedia",
+            "dimensionsSpec": Object {},
+            "granularitySpec": Object {
+              "queryGranularity": "HOUR",
+              "segmentGranularity": "DAY",
+              "type": "uniform",
+            },
+            "timestampSpec": Object {
+              "column": "timestamp",
+              "format": "iso",
+            },
+          },
+          "ioConfig": Object {
+            "inputFormat": Object {
+              "type": "json",
+            },
+            "inputSource": Object {
+              "data": "{\\"make\\":\\"Honda\\",\\"model\\":\\"Accord\\"}
+      {\\"make\\":\\"Toyota\\",\\"model\\":\\"Prius\\"}",
+              "type": "inline",
+            },
+            "type": "index",
+          },
+          "tuningConfig": Object {
+            "type": "index_parallel",
+          },
+          "type": "index",
+        },
+        "type": "index",
+      }
+    `);
+  });
 
+  // it('spec-utils sampleForParser', async () => {
+  //   expect(await sampleForParser(ingestionSpec, 'start', 'abc123')).toMatchInlineSnapshot(
+  //     `Promise {}`,
+  //   );
+  // });
+  //
+  // it('spec-utils SampleSpec', async () => {
+  //   expect(await sampleForConnect(ingestionSpec, 'start')).toMatchInlineSnapshot(`Promise {}`);
+  // });
+  //
+  // it('spec-utils sampleForTimestamp', async () => {
+  //   expect(await sampleForTimestamp(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
+  // });
+  //
+  // it('spec-utils sampleForTransform', async () => {
+  //   expect(await sampleForTransform(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
+  // });
+  //
+  // it('spec-utils sampleForFilter', async () => {
+  //   expect(await sampleForFilter(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
+  // });
+  //
+  // it('spec-utils sampleForSchema', async () => {
+  //   expect(await sampleForSchema(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
+  // });
+  //
+  // it('spec-utils sampleForExampleManifests', async () => {
+  //   expect(await sampleForExampleManifests('some url')).toMatchInlineSnapshot();
+  // });
 });
 
 describe('druid-type.ts', () => {
-  const ingestionSpec = {
+  const ingestionSpec: IngestionSpec = {
     type: 'index_parallel',
     ioConfig: {
       type: 'index_parallel',
-      firehose: {
+      inputSource: {
         type: 'http',
         uris: ['https://static.imply.io/data/wikipedia.json.gz'],
       },
+      inputFormat: {
+        type: 'json',
+      },
     },
     tuningConfig: {
       type: 'index_parallel',
@@ -133,27 +177,24 @@ describe('druid-type.ts', () => {
       segmentGranularity: 'DAY',
       queryGranularity: 'HOUR',
     },
-    parser: {
-      type: 'string',
-      parseSpec: {
-        format: 'json',
-        timestampSpec: {
-          column: 'timestamp',
-          format: 'iso',
-        },
-        dimensionsSpec: {},
-      },
-    },
+    timestampSpec: {
+      column: 'timestamp',
+      format: 'iso',
+    },
+    dimensionsSpec: {},
   },
 };
 
-  it('spec-utils getSamplerType', () => {
+  it('spec-utils guessTypeFromSample', () => {
     expect(guessTypeFromSample([])).toMatchInlineSnapshot(`"string"`);
   });
 
   it('spec-utils getColumnTypeFromHeaderAndRows', () => {
     expect(
       getColumnTypeFromHeaderAndRows({ header: ['header'], rows: [] }, 'header'),
     ).toMatchInlineSnapshot(`"string"`);
   });
 
   it('spec-utils getDimensionSpecs', () => {
     expect(getDimensionSpecs({ header: ['header'], rows: [] }, true)).toMatchInlineSnapshot(`
       Array [
@@ -161,6 +202,7 @@ describe('druid-type.ts', () => {
       ]
     `);
   });
 
   it('spec-utils getMetricSecs', () => {
     expect(getMetricSecs({ header: ['header'], rows: [] })).toMatchInlineSnapshot(`
       Array [
@@ -171,18 +213,19 @@ describe('druid-type.ts', () => {
       ]
     `);
   });
 
   it('spec-utils updateSchemaWithSample', () => {
     expect(
-      updateSchemaWithSample(
-        ingestionSpec as IngestionSpec,
-        { header: ['header'], rows: [] },
-        'specific',
-        true,
-      ),
+      updateSchemaWithSample(ingestionSpec, { header: ['header'], rows: [] }, 'specific', true),
     ).toMatchInlineSnapshot(`
       Object {
         "dataSchema": Object {
           "dataSource": "wikipedia",
+          "dimensionsSpec": Object {
+            "dimensions": Array [
+              "header",
+            ],
+          },
           "granularitySpec": Object {
             "queryGranularity": "HOUR",
             "rollup": true,
@@ -195,24 +238,16 @@ describe('druid-type.ts', () => {
             "type": "count",
           },
         ],
-        "parser": Object {
-          "parseSpec": Object {
-            "dimensionsSpec": Object {
-              "dimensions": Array [
-                "header",
-              ],
-            },
-            "format": "json",
-            "timestampSpec": Object {
-              "column": "timestamp",
-              "format": "iso",
-            },
-          },
-          "type": "string",
-        },
+        "timestampSpec": Object {
+          "column": "timestamp",
+          "format": "iso",
+        },
       },
       "ioConfig": Object {
-        "firehose": Object {
+        "inputFormat": Object {
+          "type": "json",
+        },
+        "inputSource": Object {
           "type": "http",
           "uris": Array [
             "https://static.imply.io/data/wikipedia.json.gz",
@@ -232,9 +267,11 @@ describe('druid-query.ts', () => {
   it('spec-utils parseHtmlError', () => {
     expect(parseHtmlError('<div></div>')).toMatchInlineSnapshot(`undefined`);
   });
 
   it('spec-utils parseHtmlError', () => {
     expect(getDruidErrorMessage({})).toMatchInlineSnapshot(`undefined`);
   });
 
   it('spec-utils parseQueryPlan', () => {
     expect(parseQueryPlan('start')).toMatchInlineSnapshot(`"start"`);
   });
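The applyCache test above pins down what the new row cache does to a sample spec: the cached rows replace the original input as newline-delimited JSON behind an inline inputSource, and the sampler is forced to the plain index type at every level. A sketch consistent with that inline snapshot, typed loosely; this is an illustration, not the console's actual implementation:

// Sketch only: reproduce the transformation the snapshot above describes.
function applyCacheSketch(sampleSpec: any, cacheRows: Record<string, unknown>[]): any {
  const next = JSON.parse(JSON.stringify(sampleSpec)); // deep copy; specs are plain JSON
  next.type = 'index';
  next.spec.type = 'index';
  next.spec.ioConfig = {
    type: 'index',
    inputSource: {
      type: 'inline',
      // Cached rows are re-serialized as newline-delimited JSON.
      data: cacheRows.map(r => JSON.stringify(r)).join('\n'),
    },
    inputFormat: { type: 'json' },
  };
  return next;
}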
@@ -514,7 +514,7 @@ exports[`tasks view matches snapshot 1`] = `
   filtered={
     Array [
       Object {
-        "id": "task_id",
+        "id": "group_id",
         "value": "test",
       },
       Object {
@@ -28,7 +28,7 @@ describe('tasks view', () => {
   const taskView = shallow(
     <IngestionView
       openDialog={'test'}
-      taskId={'test'}
+      taskGroupId={'test'}
       datasourceId={'datasource'}
       goToDatasource={() => {}}
       goToQuery={() => {}}
@@ -102,7 +102,7 @@ interface TaskQueryResultRow {
 }
 
 export interface IngestionViewProps {
-  taskId: string | undefined;
+  taskGroupId: string | undefined;
   datasourceId: string | undefined;
   openDialog: string | undefined;
   goToDatasource: (datasource: string) => void;
@@ -216,7 +216,7 @@ ORDER BY "rank" DESC, "created_time" DESC`;
     super(props, context);
 
     const taskFilter: Filter[] = [];
-    if (props.taskId) taskFilter.push({ id: 'task_id', value: props.taskId });
+    if (props.taskGroupId) taskFilter.push({ id: 'group_id', value: props.taskGroupId });
     if (props.datasourceId) taskFilter.push({ id: 'datasource', value: props.datasourceId });
 
     const supervisorFilter: Filter[] = [];
@@ -59,12 +59,12 @@ exports[`load data view matches snapshot 1`] = `
   </div>
   <div
     className="step-section"
-    key="Transform and configure schema"
+    key="Transform data and configure schema"
   >
     <div
       className="step-nav-l1"
     >
-      Transform and configure schema
+      Transform data and configure schema
     </div>
     <Blueprint3.ButtonGroup
       className="step-nav-l2"
@@ -27,7 +27,7 @@ describe('filter table', () => {
     header: ['c1'],
     rows: [
       {
-        raw: `{"c1":"hello"}`,
+        input: { c1: 'hello' },
         parsed: { c1: 'hello' },
       },
     ],
@@ -23,7 +23,7 @@ import { LoadDataView } from './load-data-view';
 
 describe('load data view', () => {
   it('matches snapshot', () => {
-    const loadDataView = shallow(<LoadDataView goToTask={() => {}} />);
+    const loadDataView = shallow(<LoadDataView goToIngestion={() => {}} />);
     expect(loadDataView).toMatchSnapshot();
   });
 });
File diff suppressed because it is too large
@@ -27,7 +27,7 @@ describe('parse data table', () => {
     header: ['c1'],
     rows: [
      {
-        raw: `{"c1":"hello"}`,
+        input: { c1: 'hello' },
        parsed: { c1: 'hello' },
      },
    ],
@@ -22,7 +22,7 @@ import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
 import { TableCellUnparseable } from '../../../components/table-cell-unparseable/table-cell-unparseable';
-import { caseInsensitiveContains, filterMap, parseJson } from '../../../utils';
+import { caseInsensitiveContains, filterMap } from '../../../utils';
 import { FlattenField } from '../../../utils/ingestion-spec';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
@@ -85,20 +85,16 @@ export const ParseDataTable = React.memo(function ParseDataTable(props: ParseDat
         };
       })}
       SubComponent={rowInfo => {
-        const { raw, error } = rowInfo.original;
-        const parsedJson: any = parseJson(raw);
+        const { input, error } = rowInfo.original;
+        const inputStr = JSON.stringify(input, null, 2);
 
-        if (!error && parsedJson && canFlatten) {
-          return (
-            <pre className="parse-detail">
-              {'Original row: ' + JSON.stringify(parsedJson, null, 2)}
-            </pre>
-          );
+        if (!error && input && canFlatten) {
+          return <pre className="parse-detail">{'Original row: ' + inputStr}</pre>;
         } else {
           return (
             <div className="parse-detail">
               {error && <div className="parse-error">{error}</div>}
-              <div>{'Original row: ' + rowInfo.original.raw}</div>
+              <div>{'Original row: ' + inputStr}</div>
             </div>
           );
         }
|
||||||
import { render } from '@testing-library/react';
|
import { render } from '@testing-library/react';
|
||||||
import React from 'react';
|
import React from 'react';
|
||||||
|
|
||||||
import { getEmptyTimestampSpec } from '../../../utils/ingestion-spec';
|
import { getDummyTimestampSpec } from '../../../utils/ingestion-spec';
|
||||||
|
|
||||||
import { ParseTimeTable } from './parse-time-table';
|
import { ParseTimeTable } from './parse-time-table';
|
||||||
|
|
||||||
|
@ -29,7 +29,7 @@ describe('parse time table', () => {
|
||||||
header: ['c1'],
|
header: ['c1'],
|
||||||
rows: [
|
rows: [
|
||||||
{
|
{
|
||||||
raw: `{"c1":"hello"}`,
|
input: { c1: 'hello' },
|
||||||
parsed: { c1: 'hello' },
|
parsed: { c1: 'hello' },
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
@ -39,7 +39,7 @@ describe('parse time table', () => {
|
||||||
<ParseTimeTable
|
<ParseTimeTable
|
||||||
sampleBundle={{
|
sampleBundle={{
|
||||||
headerAndRows: sampleData,
|
headerAndRows: sampleData,
|
||||||
timestampSpec: getEmptyTimestampSpec(),
|
timestampSpec: getDummyTimestampSpec(),
|
||||||
}}
|
}}
|
||||||
columnFilter=""
|
columnFilter=""
|
||||||
possibleTimestampColumnsOnly={false}
|
possibleTimestampColumnsOnly={false}
|
||||||
|
|
|
@@ -27,7 +27,7 @@ describe('schema table', () => {
     header: ['c1'],
     rows: [
      {
-        raw: `{"c1":"hello"}`,
+        input: { c1: 'hello' },
        parsed: { c1: 'hello' },
      },
    ],
@@ -27,7 +27,7 @@ describe('transform table', () => {
     header: ['c1'],
     rows: [
      {
-        raw: `{"c1":"hello"}`,
+        input: { c1: 'hello' },
        parsed: { c1: 'hello' },
      },
    ],
@@ -347,7 +347,7 @@ export class QueryView extends React.PureComponent<QueryViewProps, QueryViewStat
 
   prettyPrintJson(): void {
     this.setState(prevState => ({
-      queryString: Hjson.stringify(Hjson.parse(prevState.queryString)),
+      queryString: JSON.stringify(Hjson.parse(prevState.queryString), null, 2),
     }));
   }
 
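The prettyPrintJson change above re-serializes the parsed rune query with JSON.stringify(value, null, 2) instead of Hjson.stringify, so the prettified text stays strict JSON with quoted keys and commas. Roughly, assuming the hjson package this view already uses (the outputs shown in comments are approximate):

import * as Hjson from 'hjson';

const query = Hjson.parse(`{
  queryType: timeBoundary
  dataSource: wikipedia
}`);

// Old behavior: Hjson.stringify(query) emits Hjson, dropping quotes and commas:
//   {
//     queryType: timeBoundary
//     dataSource: wikipedia
//   }

// New behavior: strict two-space-indented JSON, accepted by Druid as-is:
JSON.stringify(query, null, 2);
//   {
//     "queryType": "timeBoundary",
//     "dataSource": "wikipedia"
//   }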
@@ -142,7 +142,7 @@ export class RunButton extends React.PureComponent<RunButtonProps> {
         />
       )}
       {runeMode && (
-        <MenuItem icon={IconNames.PRINT} text="Pretty print JSON" onClick={onPrettier} />
+        <MenuItem icon={IconNames.ALIGN_LEFT} text="Prettify JSON" onClick={onPrettier} />
       )}
     </Menu>
   );