mirror of https://github.com/apache/druid.git
Web console: improve type preservation in ingestion configs in the data loader (#10533)
* improve validation
* move to druid models
* adjust specs
* oneOf
* move transform
* segment granularities
* tidy up webpack
* add Partitioning
* break out druid models
* tidy up
* rename to Expression
* move druid time
* cleanup
* update format
* better detail
* fix e2e test
* remove forceGuaranteedRollup from e2e tests
* clean up imports
* shardSpec -> shard_spec
* fix css
* adjust snapshot
* add granularity to segments view
* rename to Time span
* use OFFSET in segments view query
* update doc links
* simplify require
* more oneOf
This commit is contained in:

parent 835b328851
commit ee61a165e3
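The central change below is a shared, field-driven validation pass: the same list of field definitions that renders a form is also used to check a free-form JSON spec. The following is a minimal sketch of that pattern, not the console code itself — types are pared down and field names are flat properties rather than the dotted deepGet paths the console uses (see AutoForm.issueWithModel in the diff further down).

    interface Field<M> {
      name: string;
      required?: boolean | ((model: M) => boolean);
      issueWithValue?: (value: any) => string | undefined;
    }

    // Returns the first problem found, or undefined if the model passes every check.
    function issueWithModel<M extends Record<string, any>>(
      model: M | undefined,
      fields: readonly Field<M>[],
    ): string | undefined {
      if (typeof model === 'undefined') return 'model is undefined';
      for (const field of fields) {
        const value = model[field.name];
        const required =
          typeof field.required === 'function' ? field.required(model) : Boolean(field.required);
        if (required && typeof value === 'undefined') return `field ${field.name} is required`;
        if (typeof value !== 'undefined' && field.issueWithValue) {
          const issue = field.issueWithValue(value);
          if (issue) return `field ${field.name} has issue ${issue}`;
        }
      }
      return;
    }

    // The same checker can then gate both a form's submit button and raw JSON edits:
    const fields: Field<{ partitionDimension?: string }>[] = [
      { name: 'partitionDimension', required: true },
    ];
    issueWithModel({}, fields); // => 'field partitionDimension is required'
    issueWithModel({ partitionDimension: 'channel' }, fields); // => undefined
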
@@ -18,9 +18,7 @@
 
 import * as playwright from 'playwright-chromium';
 
-import { clickButton } from '../../util/playwright';
-import { getLabeledInput } from '../../util/playwright';
-import { setLabeledInput } from '../../util/playwright';
+import { clickButton, getLabeledInput, setLabeledInput } from '../../util/playwright';
 import { extractTable } from '../../util/table';
 import { readPartitionSpec } from '../load-data/config/partition';

@@ -36,10 +34,13 @@ enum DatasourceColumn {
   SEGMENT_LOAD_DROP,
   TOTAL_DATA_SIZE,
   SEGMENT_SIZE,
+  SEGMENT_GRANULARITY,
+  TOTAL_ROWS,
+  AVG_ROW_SIZE,
   REPLICATED_SIZE,
   COMPACTION,
   PERCENT_COMPACTED,
   LEFT_TO_BE_COMPACTED,
   RETENTION,
   ACTIONS,
 }

@@ -18,9 +18,7 @@
 
 import * as playwright from 'playwright-chromium';
 
-import { selectSuggestibleInput } from '../../../util/playwright';
-import { getLabeledInput } from '../../../util/playwright';
-import { setLabeledInput } from '../../../util/playwright';
+import { getLabeledInput, selectSuggestibleInput, setLabeledInput } from '../../../util/playwright';
 
 /* tslint:disable max-classes-per-file */
 

@@ -159,18 +157,14 @@ export interface SingleDimPartitionsSpec extends SingleDimPartitionsSpecProps {}
  * Data loader partition step configuration.
  */
 export class PartitionConfig {
-  readonly forceGuaranteedRollupText: string;
-
   constructor(props: PartitionConfigProps) {
     Object.assign(this, props);
-    this.forceGuaranteedRollupText = this.forceGuaranteedRollup ? 'True' : 'False';
   }
 }
 
 interface PartitionConfigProps {
   readonly segmentGranularity: SegmentGranularity;
   readonly timeIntervals: string | null;
-  readonly forceGuaranteedRollup: boolean | null;
   readonly partitionsSpec: PartitionsSpec | null;
 }
 

@@ -18,10 +18,7 @@
 
 import * as playwright from 'playwright-chromium';
 
-import { clickButton } from '../../util/playwright';
-import { clickLabeledButton } from '../../util/playwright';
-import { setLabeledInput } from '../../util/playwright';
-import { setLabeledTextarea } from '../../util/playwright';
+import { clickButton, setLabeledInput, setLabeledTextarea } from '../../util/playwright';
 
 import { ConfigureSchemaConfig } from './config/configure-schema';
 import { PartitionConfig } from './config/partition';

@@ -128,13 +125,8 @@ export class DataLoader {
 
   private async applyPartitionConfig(partitionConfig: PartitionConfig) {
     await setLabeledInput(this.page, 'Segment granularity', partitionConfig.segmentGranularity);
-    if (partitionConfig.forceGuaranteedRollup) {
-      await clickLabeledButton(
-        this.page,
-        'Force guaranteed rollup',
-        partitionConfig.forceGuaranteedRollupText,
-      );
-      await setLabeledTextarea(this.page, 'Time intervals', partitionConfig.timeIntervals!);
+    if (partitionConfig.timeIntervals) {
+      await setLabeledTextarea(this.page, 'Time intervals', partitionConfig.timeIntervals);
     }
     if (partitionConfig.partitionsSpec != null) {
       await partitionConfig.partitionsSpec.apply(this.page);

@@ -18,8 +18,7 @@
 
 import * as playwright from 'playwright-chromium';
 
-import { clickButton } from '../../util/playwright';
-import { setInput } from '../../util/playwright';
+import { clickButton, setInput } from '../../util/playwright';
 import { extractTable } from '../../util/table';
 
 /**

@@ -68,7 +68,6 @@ describe('Reindexing from Druid', () => {
     const partitionConfig = new PartitionConfig({
       segmentGranularity: SegmentGranularity.DAY,
       timeIntervals: interval,
-      forceGuaranteedRollup: true,
       partitionsSpec: new SingleDimPartitionsSpec({
         partitionDimension: 'channel',
         targetRowsPerSegment: 10_000,

@@ -65,7 +65,6 @@ describe('Tutorial: Loading a file', () => {
     const partitionConfig = new PartitionConfig({
       segmentGranularity: SegmentGranularity.DAY,
       timeIntervals: null,
-      forceGuaranteedRollup: null,
       partitionsSpec: null,
     });
     const publishConfig = new PublishConfig({ datasourceName: datasourceName });

@@ -36,6 +36,7 @@ exports.SQL_KEYWORDS = [
  'ASC',
  'DESC',
  'LIMIT',
+  'OFFSET',
  'UNION ALL',
  'JOIN',
  'LEFT',

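The new OFFSET keyword backs the reworked segments view query, which pages through sys.segments with LIMIT/OFFSET (per the "use OFFSET in segments view query" note above). A hypothetical sketch of that query shape — the exact query text in the console may differ:

    const page = 2;
    const pageSize = 50;
    // sys.segments is Druid's system table of segments; "start" is quoted because
    // it collides with a keyword in SQL.
    const segmentsQuery = `
      SELECT "segment_id", "datasource", "size"
      FROM sys.segments
      ORDER BY "start" DESC
      LIMIT ${pageSize}
      OFFSET ${page * pageSize}
    `;
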
@@ -17,10 +17,6 @@
  */
 
 .auto-form {
-  .ace-solarized-dark {
-    background-color: #212e37;
-  }
-
   // Popover in info label
   label.bp3-label {
     position: relative;

@@ -19,7 +19,7 @@
 import { Button, ButtonGroup, FormGroup, Intent, NumericInput } from '@blueprintjs/core';
 import React from 'react';
 
-import { deepDelete, deepGet, deepSet } from '../../utils/object-change';
+import { deepDelete, deepGet, deepSet } from '../../utils';
 import { ArrayInput } from '../array-input/array-input';
 import { FormGroupWithInfo } from '../form-group-with-info/form-group-with-info';
 import { IntervalInput } from '../interval-input/interval-input';

@@ -55,6 +55,7 @@ export interface Field<M> {
   defined?: Functor<M, boolean>;
   required?: Functor<M, boolean>;
   adjustment?: (model: M) => M;
+  issueWithValue?: (value: any) => string | undefined;
 }
 
 export interface AutoFormProps<M> {

@@ -93,6 +94,48 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
     }
   }
 
+  static issueWithModel<M>(model: M | undefined, fields: readonly Field<M>[]): string | undefined {
+    if (typeof model === 'undefined') {
+      return `model is undefined`;
+    }
+
+    // Precompute which fields are defined because fields could be defined twice and only one should do the checking
+    const definedFields: Record<string, Field<M>> = {};
+    for (const field of fields) {
+      const fieldDefined = AutoForm.evaluateFunctor(field.defined, model, true);
+      if (fieldDefined) {
+        definedFields[field.name] = field;
+      }
+    }
+
+    for (const field of fields) {
+      const fieldValue = deepGet(model, field.name);
+      const fieldValueDefined = typeof fieldValue !== 'undefined';
+      const fieldThatIsDefined = definedFields[field.name];
+      if (fieldThatIsDefined) {
+        if (fieldThatIsDefined === field) {
+          const fieldRequired = AutoForm.evaluateFunctor(field.required, model, false);
+          if (fieldRequired) {
+            if (!fieldValueDefined) {
+              return `field ${field.name} is required`;
+            }
+          }
+
+          if (fieldValueDefined && field.issueWithValue) {
+            const valueIssue = field.issueWithValue(fieldValue);
+            if (valueIssue) return `field ${field.name} has issue ${valueIssue}`;
+          }
+        }
+      } else {
+        // The field is undefined
+        if (fieldValueDefined) {
+          return `field ${field.name} is defined but it should not be`;
+        }
+      }
+    }
+    return;
+  }
+
   constructor(props: AutoFormProps<T>) {
     super(props);
     this.state = {};

@@ -274,6 +317,7 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
           onChange={(v: any) => this.fieldChange(field, v)}
           placeholder={AutoForm.evaluateFunctor(field.placeholder, model, '')}
           height={field.height}
+          issueWithValue={field.issueWithValue}
         />
       );
     }

@@ -159,7 +159,7 @@ exports[`header bar matches snapshot 1`] = `
   />
   <Blueprint3.MenuItem
     disabled={false}
-    href="https://druid.apache.org/docs/0.19.0"
+    href="https://druid.apache.org/docs/0.20.0"
     icon="th"
     multiline={false}
     popoverProps={Object {}}

@@ -41,3 +41,4 @@ export * from './table-cell/table-cell';
 export * from './table-column-selector/table-column-selector';
 export * from './timed-button/timed-button';
 export * from './view-control-bar/view-control-bar';
+export * from './form-json-selector/form-json-selector';

@@ -44,7 +44,9 @@ export function extractRowColumnFromHjsonError(
 
 function stringifyJson(item: any): string {
   if (item != null) {
-    return JSON.stringify(item, null, 2);
+    const str = JSON.stringify(item, null, 2);
+    if (str === '{}') return '{\n\n}'; // Very special case for an empty object to make it more beautiful
+    return str;
   } else {
     return '';
   }

@@ -68,10 +70,11 @@ interface JsonInputProps {
   focus?: boolean;
   width?: string;
   height?: string;
+  issueWithValue?: (value: any) => string | undefined;
 }
 
 export const JsonInput = React.memo(function JsonInput(props: JsonInputProps) {
-  const { onChange, placeholder, focus, width, height, value } = props;
+  const { onChange, placeholder, focus, width, height, value, issueWithValue } = props;
   const [internalValue, setInternalValue] = useState<InternalValue>(() => ({
     value,
     stringified: stringifyJson(value),

@@ -102,6 +105,14 @@ export const JsonInput = React.memo(function JsonInput(props: JsonInputProps) {
           error = e;
         }
 
+        if (!error && issueWithValue) {
+          const issue = issueWithValue(value);
+          if (issue) {
+            value = undefined;
+            error = new Error(issue);
+          }
+        }
+
         setInternalValue({
           value,
           error,

@@ -83,6 +83,7 @@ export const SuggestibleInput = React.memo(function SuggestibleInput(props: Sugg
       rightElement={
         suggestions && (
           <Popover
+            boundary={'window'}
             content={
               <Menu>
                 {suggestions.map(suggestion => {

@@ -23,7 +23,7 @@ import ReactTable, { CellInfo, Column } from 'react-table';
 
 import { useQueryManager } from '../../hooks';
 import { UrlBaser } from '../../singletons/url-baser';
-import { deepGet } from '../../utils/object-change';
+import { deepGet } from '../../utils';
 import { Loader } from '../loader/loader';
 
 import './supervisor-statistics-table.scss';

@@ -46,8 +46,4 @@
     height: 22px;
     border-top: 2px solid #6d8ea9;
   }
-
-  .ace-solarized-dark {
-    background-color: rgba($dark-gray1, 0.5);
-  }
 }

@@ -77,7 +77,6 @@ export class ConsoleApplication extends React.PureComponent<
   private datasource?: string;
   private onlyUnavailable?: boolean;
   private initQuery?: string;
-  private middleManager?: string;
 
   constructor(props: ConsoleApplicationProps, context: any) {
     super(props, context);

@@ -118,7 +117,6 @@ export class ConsoleApplication extends React.PureComponent<
       this.datasource = undefined;
       this.onlyUnavailable = undefined;
       this.initQuery = undefined;
-      this.middleManager = undefined;
     }, 50);
   }
 

@@ -156,12 +154,6 @@ export class ConsoleApplication extends React.PureComponent<
     this.resetInitialsWithDelay();
   };
 
-  private goToMiddleManager = (middleManager: string) => {
-    this.middleManager = middleManager;
-    window.location.hash = 'services';
-    this.resetInitialsWithDelay();
-  };
-
   private goToQuery = (initQuery: string) => {
     this.initQuery = initQuery;
     window.location.hash = 'query';

@@ -254,7 +246,6 @@ export class ConsoleApplication extends React.PureComponent<
         openDialog={this.openDialog}
         goToDatasource={this.goToDatasources}
         goToQuery={this.goToQuery}
-        goToMiddleManager={this.goToMiddleManager}
         goToLoadData={this.goToLoadData}
         capabilities={capabilities}
       />,

@@ -266,7 +257,6 @@ export class ConsoleApplication extends React.PureComponent<
     return this.wrapInViewContainer(
       'services',
       <ServicesView
-        middleManager={this.middleManager}
         goToQuery={this.goToQuery}
         goToTask={this.goToIngestionWithTaskGroupId}
         capabilities={capabilities}

@@ -90,6 +90,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (dynamic partit
     "label": "Target rows per segment",
     "name": "tuningConfig.partitionsSpec.targetRowsPerSegment",
     "type": "number",
+    "zeroMeansUndefined": true,
   },
   Object {
     "defined": [Function],

@@ -104,6 +105,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (dynamic partit
     "label": "Num shards",
     "name": "tuningConfig.partitionsSpec.numShards",
     "type": "number",
+    "zeroMeansUndefined": true,
   },
   Object {
     "defined": [Function],

@@ -112,6 +114,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (dynamic partit
     </p>,
     "label": "Partition dimensions",
     "name": "tuningConfig.partitionsSpec.partitionDimensions",
+    "placeholder": "(all dimensions)",
     "type": "string-array",
   },
   Object {

@@ -175,14 +178,14 @@ exports[`CompactionDialog matches snapshot with compactionConfig (dynamic partit
     "type": "number",
   },
   Object {
-    "defaultValue": 1,
+    "defaultValue": 10,
     "defined": [Function],
     "info": <React.Fragment>
       Maximum number of merge tasks which can be run at the same time.
     </React.Fragment>,
-    "label": "Max num merge tasks",
+    "label": "Total num merge tasks",
     "min": 1,
-    "name": "tuningConfig.maxNumMergeTasks",
+    "name": "tuningConfig.totalNumMergeTasks",
     "type": "number",
   },
   Object {

@@ -327,6 +330,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (hashed partiti
     "label": "Target rows per segment",
     "name": "tuningConfig.partitionsSpec.targetRowsPerSegment",
     "type": "number",
+    "zeroMeansUndefined": true,
   },
   Object {
     "defined": [Function],

@@ -341,6 +345,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (hashed partiti
     "label": "Num shards",
     "name": "tuningConfig.partitionsSpec.numShards",
     "type": "number",
+    "zeroMeansUndefined": true,
   },
   Object {
     "defined": [Function],

@@ -349,6 +354,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (hashed partiti
     </p>,
     "label": "Partition dimensions",
     "name": "tuningConfig.partitionsSpec.partitionDimensions",
+    "placeholder": "(all dimensions)",
     "type": "string-array",
   },
   Object {

@@ -412,14 +418,14 @@ exports[`CompactionDialog matches snapshot with compactionConfig (hashed partiti
     "type": "number",
   },
   Object {
-    "defaultValue": 1,
+    "defaultValue": 10,
     "defined": [Function],
     "info": <React.Fragment>
       Maximum number of merge tasks which can be run at the same time.
     </React.Fragment>,
-    "label": "Max num merge tasks",
+    "label": "Total num merge tasks",
     "min": 1,
-    "name": "tuningConfig.maxNumMergeTasks",
+    "name": "tuningConfig.totalNumMergeTasks",
     "type": "number",
   },
   Object {

@@ -564,6 +570,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (single_dim par
     "label": "Target rows per segment",
     "name": "tuningConfig.partitionsSpec.targetRowsPerSegment",
     "type": "number",
+    "zeroMeansUndefined": true,
   },
   Object {
     "defined": [Function],

@@ -578,6 +585,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (single_dim par
     "label": "Num shards",
     "name": "tuningConfig.partitionsSpec.numShards",
     "type": "number",
+    "zeroMeansUndefined": true,
   },
   Object {
     "defined": [Function],

@@ -586,6 +594,7 @@ exports[`CompactionDialog matches snapshot with compactionConfig (single_dim par
     </p>,
     "label": "Partition dimensions",
     "name": "tuningConfig.partitionsSpec.partitionDimensions",
+    "placeholder": "(all dimensions)",
     "type": "string-array",
   },
   Object {

@@ -649,14 +658,14 @@ exports[`CompactionDialog matches snapshot with compactionConfig (single_dim par
     "type": "number",
   },
   Object {
-    "defaultValue": 1,
+    "defaultValue": 10,
     "defined": [Function],
     "info": <React.Fragment>
       Maximum number of merge tasks which can be run at the same time.
     </React.Fragment>,
-    "label": "Max num merge tasks",
+    "label": "Total num merge tasks",
     "min": 1,
-    "name": "tuningConfig.maxNumMergeTasks",
+    "name": "tuningConfig.totalNumMergeTasks",
     "type": "number",
   },
   Object {

@@ -801,6 +810,7 @@ exports[`CompactionDialog matches snapshot without compactionConfig 1`] = `
     "label": "Target rows per segment",
     "name": "tuningConfig.partitionsSpec.targetRowsPerSegment",
     "type": "number",
+    "zeroMeansUndefined": true,
   },
   Object {
     "defined": [Function],

@@ -815,6 +825,7 @@ exports[`CompactionDialog matches snapshot without compactionConfig 1`] = `
     "label": "Num shards",
     "name": "tuningConfig.partitionsSpec.numShards",
     "type": "number",
+    "zeroMeansUndefined": true,
   },
   Object {
     "defined": [Function],

@@ -823,6 +834,7 @@ exports[`CompactionDialog matches snapshot without compactionConfig 1`] = `
     </p>,
     "label": "Partition dimensions",
     "name": "tuningConfig.partitionsSpec.partitionDimensions",
+    "placeholder": "(all dimensions)",
     "type": "string-array",
   },
   Object {

@@ -886,14 +898,14 @@ exports[`CompactionDialog matches snapshot without compactionConfig 1`] = `
     "type": "number",
   },
   Object {
-    "defaultValue": 1,
+    "defaultValue": 10,
     "defined": [Function],
     "info": <React.Fragment>
       Maximum number of merge tasks which can be run at the same time.
     </React.Fragment>,
-    "label": "Max num merge tasks",
+    "label": "Total num merge tasks",
     "min": 1,
-    "name": "tuningConfig.maxNumMergeTasks",
+    "name": "tuningConfig.totalNumMergeTasks",
     "type": "number",
   },
   Object {

@@ -31,8 +31,4 @@
     flex: 1;
     overflow: auto;
   }
-
-  .ace-solarized-dark {
-    background-color: #232c35;
-  }
 }

@@ -16,254 +16,14 @@
  * limitations under the License.
  */
 
-import { Button, Classes, Code, Dialog, Intent } from '@blueprintjs/core';
+import { Button, Classes, Dialog, Intent } from '@blueprintjs/core';
 import React, { useState } from 'react';
 
-import { AutoForm, Field, JsonInput } from '../../components';
-import {
-  FormJsonSelector,
-  FormJsonTabs,
-} from '../../components/form-json-selector/form-json-selector';
-import { deepGet, deepSet } from '../../utils/object-change';
+import { AutoForm, FormJsonSelector, FormJsonTabs, JsonInput } from '../../components';
+import { COMPACTION_CONFIG_FIELDS, CompactionConfig } from '../../druid-models';
 
 import './compaction-dialog.scss';
 
-type CompactionConfig = Record<string, any>;
-
-const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
-  {
-    name: 'skipOffsetFromLatest',
-    type: 'string',
-    defaultValue: 'P1D',
-    suggestions: ['PT0H', 'PT1H', 'P1D', 'P3D'],
-    info: (
-      <p>
-        The offset for searching segments to be compacted. Strongly recommended to set for realtime
-        dataSources.
-      </p>
-    ),
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.type',
-    label: 'Partitioning type',
-    type: 'string',
-    suggestions: ['dynamic', 'hashed', 'single_dim'],
-    info: (
-      <p>
-        For perfect rollup, you should use either <Code>hashed</Code> (partitioning based on the
-        hash of dimensions in each row) or <Code>single_dim</Code> (based on ranges of a single
-        dimension). For best-effort rollup, you should use <Code>dynamic</Code>.
-      </p>
-    ),
-  },
-  // partitionsSpec type: dynamic
-  {
-    name: 'tuningConfig.partitionsSpec.maxRowsPerSegment',
-    label: 'Max rows per segment',
-    type: 'number',
-    defaultValue: 5000000,
-    defined: (t: CompactionConfig) => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'dynamic',
-    info: <>Determines how many rows are in each segment.</>,
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.maxTotalRows',
-    label: 'Max total rows',
-    type: 'number',
-    defaultValue: 20000000,
-    defined: (t: CompactionConfig) => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'dynamic',
-    info: <>Total number of rows in segments waiting for being pushed.</>,
-  },
-  // partitionsSpec type: hashed
-  {
-    name: 'tuningConfig.partitionsSpec.targetRowsPerSegment',
-    label: 'Target rows per segment',
-    type: 'number',
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.numShards'),
-    info: (
-      <>
-        <p>
-          If the segments generated are a sub-optimal size for the requested partition dimensions,
-          consider setting this field.
-        </p>
-        <p>
-          A target row count for each partition. Each partition will have a row count close to the
-          target assuming evenly distributed keys. Defaults to 5 million if numShards is null.
-        </p>
-      </>
-    ),
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.numShards',
-    label: 'Num shards',
-    type: 'number',
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
-    info: (
-      <>
-        <p>
-          If you know the optimal number of shards and want to speed up the time it takes for
-          compaction to run, set this field.
-        </p>
-        <p>
-          Directly specify the number of shards to create. If this is specified and 'intervals' is
-          specified in the granularitySpec, the index task can skip the determine
-          intervals/partitions pass through the data.
-        </p>
-      </>
-    ),
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.partitionDimensions',
-    label: 'Partition dimensions',
-    type: 'string-array',
-    defined: (t: CompactionConfig) => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed',
-    info: <p>The dimensions to partition on. Leave blank to select all dimensions.</p>,
-  },
-  // partitionsSpec type: single_dim
-  {
-    name: 'tuningConfig.partitionsSpec.partitionDimension',
-    label: 'Partition dimension',
-    type: 'string',
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim',
-    required: true,
-    info: <p>The dimension to partition on.</p>,
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.targetRowsPerSegment',
-    label: 'Target rows per segment',
-    type: 'number',
-    zeroMeansUndefined: true,
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim' &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
-    required: (t: CompactionConfig) =>
-      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment') &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
-    info: (
-      <p>
-        Target number of rows to include in a partition, should be a number that targets segments of
-        500MB~1GB.
-      </p>
-    ),
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.maxRowsPerSegment',
-    label: 'Max rows per segment',
-    type: 'number',
-    zeroMeansUndefined: true,
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim' &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
-    required: (t: CompactionConfig) =>
-      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment') &&
-      !deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
-    info: <p>Maximum number of rows to include in a partition.</p>,
-  },
-  {
-    name: 'tuningConfig.partitionsSpec.assumeGrouped',
-    label: 'Assume grouped',
-    type: 'boolean',
-    defaultValue: false,
-    defined: (t: CompactionConfig) =>
-      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim',
-    info: (
-      <p>
-        Assume that input data has already been grouped on time and dimensions. Ingestion will run
-        faster, but may choose sub-optimal partitions if this assumption is violated.
-      </p>
-    ),
-  },
-  {
-    name: 'tuningConfig.maxNumConcurrentSubTasks',
-    label: 'Max num concurrent sub tasks',
-    type: 'number',
-    defaultValue: 1,
-    min: 1,
-    info: (
-      <>
-        Maximum number of tasks which can be run at the same time. The supervisor task would spawn
-        worker tasks up to maxNumConcurrentSubTasks regardless of the available task slots. If this
-        value is set to 1, the supervisor task processes data ingestion on its own instead of
-        spawning worker tasks. If this value is set to too large, too many worker tasks can be
-        created which might block other ingestion.
-      </>
-    ),
-  },
-  {
-    name: 'inputSegmentSizeBytes',
-    type: 'number',
-    defaultValue: 419430400,
-    info: (
-      <p>
-        Maximum number of total segment bytes processed per compaction task. Since a time chunk must
-        be processed in its entirety, if the segments for a particular time chunk have a total size
-        in bytes greater than this parameter, compaction will not run for that time chunk. Because
-        each compaction task runs with a single thread, setting this value too far above 1–2GB will
-        result in compaction tasks taking an excessive amount of time.
-      </p>
-    ),
-  },
-  {
-    name: 'tuningConfig.maxNumMergeTasks',
-    label: 'Max num merge tasks',
-    type: 'number',
-    defaultValue: 1,
-    min: 1,
-    defined: (t: CompactionConfig) =>
-      ['hashed', 'single_dim'].includes(deepGet(t, 'tuningConfig.partitionsSpec.type')),
-    info: <>Maximum number of merge tasks which can be run at the same time.</>,
-  },
-  {
-    name: 'tuningConfig.splitHintSpec.maxInputSegmentBytesPerTask',
-    label: 'Max input segment bytes per task',
-    type: 'number',
-    defaultValue: 500000000,
-    min: 1000000,
-    adjustment: (t: CompactionConfig) => deepSet(t, 'tuningConfig.splitHintSpec.type', 'segments'),
-    info: (
-      <>
-        Maximum number of bytes of input segments to process in a single task. If a single segment
-        is larger than this number, it will be processed by itself in a single task (input segments
-        are never split across tasks).
-      </>
-    ),
-  },
-];
-
-function validCompactionConfig(compactionConfig: CompactionConfig): boolean {
-  const partitionsSpecType =
-    deepGet(compactionConfig, 'tuningConfig.partitionsSpec.type') || 'dynamic';
-  switch (partitionsSpecType) {
-    // case 'dynamic': // Nothing to check for dynamic
-    case 'hashed':
-      return !(
-        Boolean(deepGet(compactionConfig, 'tuningConfig.partitionsSpec.targetRowsPerSegment')) &&
-        Boolean(deepGet(compactionConfig, 'tuningConfig.partitionsSpec.numShards'))
-      );
-      break;
-    case 'single_dim':
-      if (!deepGet(compactionConfig, 'tuningConfig.partitionsSpec.partitionDimension')) {
-        return false;
-      }
-      const hasTargetRowsPerSegment = Boolean(
-        deepGet(compactionConfig, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
-      );
-      const hasMaxRowsPerSegment = Boolean(
-        deepGet(compactionConfig, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
-      );
-      if (hasTargetRowsPerSegment === hasMaxRowsPerSegment) {
-        return false;
-      }
-      break;
-  }
-
-  return true;
-}
-
 export interface CompactionDialogProps {
   onClose: () => void;
   onSave: (compactionConfig: CompactionConfig) => void;
@@ -283,8 +43,9 @@ export const CompactionDialog = React.memo(function CompactionDialog(props: Comp
     },
   );
 
+  const issueWithCurrentConfig = AutoForm.issueWithModel(currentConfig, COMPACTION_CONFIG_FIELDS);
   function handleSubmit() {
-    if (!validCompactionConfig(currentConfig)) return;
+    if (issueWithCurrentConfig) return;
     onSave(currentConfig);
   }
 

@@ -305,7 +66,12 @@ export const CompactionDialog = React.memo(function CompactionDialog(props: Comp
             onChange={m => setCurrentConfig(m)}
           />
         ) : (
-          <JsonInput value={currentConfig} onChange={setCurrentConfig} height="100%" />
+          <JsonInput
+            value={currentConfig}
+            onChange={setCurrentConfig}
+            issueWithValue={value => AutoForm.issueWithModel(value, COMPACTION_CONFIG_FIELDS)}
+            height="100%"
+          />
         )}
       </div>
       <div className={Classes.DIALOG_FOOTER}>

@@ -316,7 +82,7 @@ export const CompactionDialog = React.memo(function CompactionDialog(props: Comp
           text="Submit"
           intent={Intent.PRIMARY}
           onClick={handleSubmit}
-          disabled={!validCompactionConfig(currentConfig)}
+          disabled={Boolean(issueWithCurrentConfig)}
         />
       </div>
     </div>

@@ -11,7 +11,7 @@ exports[`coordinator dynamic config matches snapshot 1`] = `
     Edit the coordinator dynamic configuration on the fly. For more information please refer to the
     
     <Memo(ExternalLink)
-      href="https://druid.apache.org/docs/0.19.0/configuration/index.html#dynamic-configuration"
+      href="https://druid.apache.org/docs/0.20.0/configuration/index.html#dynamic-configuration"
     >
       documentation
     </Memo(ExternalLink)>

@@ -52,7 +52,7 @@ export const CoordinatorDynamicConfigDialog = React.memo(function CoordinatorDyn
     processQuery: async () => {
       try {
         const configResp = await axios.get('/druid/coordinator/v1/config');
-        setDynamicConfig(configResp.data);
+        setDynamicConfig(configResp.data || {});
       } catch (e) {
         AppToaster.show({
           icon: IconNames.ERROR,

@@ -18,8 +18,7 @@
 
 import axios from 'axios';
 
-import { pluralIfNeeded, queryDruidSql } from '../../utils';
-import { deepGet } from '../../utils/object-change';
+import { deepGet, pluralIfNeeded, queryDruidSql } from '../../utils';
 import { postToSampler } from '../../utils/sampler';
 
 export interface CheckControls {

@@ -16,6 +16,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
   >
     <Blueprint3.InputGroup
       disabled={false}
+      intent="none"
       onChange={[Function]}
       placeholder="Enter the lookup name"
      value="test"

@@ -68,6 +69,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
   Object {
     "adjustment": [Function],
     "name": "type",
+    "required": true,
     "suggestions": Array [
       "map",
       "cachedNamespace",

@@ -77,7 +79,9 @@ exports[`LookupEditDialog matches snapshot 1`] = `
   Object {
     "defined": [Function],
     "height": "60vh",
+    "issueWithValue": [Function],
     "name": "map",
+    "required": true,
     "type": "json",
   },
   Object {

@@ -85,6 +89,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
     "label": "Globally cached lookup type",
     "name": "extractionNamespace.type",
     "placeholder": "uri",
+    "required": true,
     "suggestions": Array [
       "uri",
       "jdbc",

@@ -97,52 +102,65 @@ exports[`LookupEditDialog matches snapshot 1`] = `
     "label": "URI prefix",
     "name": "extractionNamespace.uriPrefix",
     "placeholder": "s3://bucket/some/key/prefix/",
+    "required": [Function],
     "type": "string",
   },
   Object {
     "defined": [Function],
-    "info": "Optional regex for matching the file name under uriPrefix. Only used if uriPrefix is used",
+    "info": <React.Fragment>
+      <p>
+        URI for the file of interest, specified as a file, hdfs, or s3 path
+      </p>
+      <p>
+        The URI prefix option is strictly better than URI and should be used instead
+      </p>
+    </React.Fragment>,
+    "label": "URI (deprecated)",
+    "name": "extractionNamespace.uri",
+    "placeholder": "s3://bucket/some/key/prefix/lookups-01.gz",
+    "required": [Function],
+    "type": "string",
+  },
+  Object {
+    "defaultValue": ".*",
+    "defined": [Function],
+    "info": "Optional regex for matching the file name under uriPrefix.",
     "label": "File regex",
     "name": "extractionNamespace.fileRegex",
-    "placeholder": "(optional)",
     "type": "string",
   },
   Object {
     "defaultValue": "csv",
     "defined": [Function],
-    "label": "Format",
+    "info": <React.Fragment>
+      <p>
+        The format of the data in the lookup files.
+      </p>
+      <p>
+        The
+        <Unknown>
+          simpleJson
+        </Unknown>
+        lookupParseSpec does not take any parameters. It is simply a line delimited JSON file where the field is the key, and the field's value is the value.
+      </p>
+    </React.Fragment>,
+    "label": "Parse format",
     "name": "extractionNamespace.namespaceParseSpec.format",
+    "required": true,
     "suggestions": Array [
       "csv",
       "tsv",
-      "customJson",
       "simpleJson",
+      "customJson",
     ],
     "type": "string",
   },
   Object {
     "defaultValue": 0,
     "defined": [Function],
-    "info": "The list of columns in the csv file",
-    "label": "Columns",
-    "name": "extractionNamespace.namespaceParseSpec.columns",
-    "placeholder": "[\"key\", \"value\"]",
-    "type": "string-array",
-  },
-  Object {
-    "defined": [Function],
-    "info": "The name of the column containing the key",
-    "label": "Key column",
-    "name": "extractionNamespace.namespaceParseSpec.keyColumn",
-    "placeholder": "Key",
-    "type": "string",
-  },
-  Object {
-    "defined": [Function],
-    "info": "The name of the column containing the value",
-    "label": "Value column",
-    "name": "extractionNamespace.namespaceParseSpec.valueColumn",
-    "placeholder": "Value",
-    "type": "string",
+    "info": "Number of header rows to be skipped. The default number of header rows to be skipped is 0.",
+    "label": "Skip header rows",
+    "name": "extractionNamespace.namespaceParseSpec.skipHeaderRows",
+    "type": "number",
   },
   Object {
     "defaultValue": false,

@@ -154,11 +172,28 @@ exports[`LookupEditDialog matches snapshot 1`] = `
   },
   Object {
     "defined": [Function],
-    "info": "Number of header rows to be skipped. The default number of header rows to be skipped is 0.",
-    "label": "Skip header rows",
-    "name": "extractionNamespace.namespaceParseSpec.skipHeaderRows",
-    "placeholder": "(optional)",
-    "type": "number",
+    "info": "The list of columns in the csv file",
+    "label": "Columns",
+    "name": "extractionNamespace.namespaceParseSpec.columns",
+    "placeholder": "[\"key\", \"value\"]",
+    "required": [Function],
+    "type": "string-array",
+  },
+  Object {
+    "defined": [Function],
+    "info": "The name of the column containing the key",
+    "label": "Key column",
+    "name": "extractionNamespace.namespaceParseSpec.keyColumn",
+    "placeholder": "(optional - defaults to the first column)",
+    "type": "string",
+  },
+  Object {
+    "defined": [Function],
+    "info": "The name of the column containing the value",
+    "label": "Value column",
+    "name": "extractionNamespace.namespaceParseSpec.valueColumn",
+    "placeholder": "(optional - defaults to the second column)",
+    "type": "string",
   },
   Object {
     "defined": [Function],

@@ -179,6 +214,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
     "label": "Key field name",
     "name": "extractionNamespace.namespaceParseSpec.keyFieldName",
     "placeholder": "key",
+    "required": true,
     "type": "string",
   },
   Object {

@@ -186,6 +222,15 @@ exports[`LookupEditDialog matches snapshot 1`] = `
     "label": "Value field name",
     "name": "extractionNamespace.namespaceParseSpec.valueFieldName",
     "placeholder": "value",
+    "required": true,
     "type": "string",
   },
   Object {
+    "defaultValue": "0",
+    "defined": [Function],
+    "info": "Period between polling for updates",
+    "label": "Poll period",
+    "name": "extractionNamespace.pollPeriod",
+    "type": "string",
+  },
+  Object {

@@ -205,20 +250,15 @@ exports[`LookupEditDialog matches snapshot 1`] = `
     "label": "Namespace",
     "name": "extractionNamespace.namespace",
     "placeholder": "some_lookup",
+    "required": true,
     "type": "string",
   },
   Object {
     "defined": [Function],
-    "info": "Defines the connectURI value on the The connector config to used",
-    "label": "CreateTables",
-    "name": "extractionNamespace.connectorConfig.createTables",
-    "type": "boolean",
-  },
-  Object {
-    "defined": [Function],
     "info": "Defines the connectURI value on the The connector config to used",
     "label": "Connect URI",
     "name": "extractionNamespace.connectorConfig.connectURI",
+    "required": true,
     "type": "string",
   },
   Object {

@@ -235,6 +275,13 @@ exports[`LookupEditDialog matches snapshot 1`] = `
     "name": "extractionNamespace.connectorConfig.password",
     "type": "string",
   },
   Object {
+    "defined": [Function],
+    "info": "Should tables be created",
+    "label": "Create tables",
+    "name": "extractionNamespace.connectorConfig.createTables",
+    "type": "boolean",
+  },
+  Object {
     "defined": [Function],
     "info": <React.Fragment>

@@ -252,6 +299,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
     "label": "Table",
     "name": "extractionNamespace.table",
     "placeholder": "some_lookup_table",
+    "required": true,
     "type": "string",
   },
   Object {

@@ -271,6 +319,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
     "label": "Key column",
     "name": "extractionNamespace.keyColumn",
     "placeholder": "my_key_value",
+    "required": true,
     "type": "string",
   },
   Object {

@@ -290,6 +339,7 @@ exports[`LookupEditDialog matches snapshot 1`] = `
     "label": "Value column",
     "name": "extractionNamespace.valueColumn",
     "placeholder": "my_column_value",
+    "required": true,
     "type": "string",
   },
   Object {

@@ -325,25 +375,17 @@ exports[`LookupEditDialog matches snapshot 1`] = `
         ? FROM namespace.table WHERE filter
       </p>
     </React.Fragment>,
-    "label": "TsColumn",
+    "label": "Timestamp column",
     "name": "extractionNamespace.tsColumn",
     "placeholder": "(optional)",
     "type": "string",
   },
   Object {
-    "defined": [Function],
-    "info": "Period between polling for updates",
-    "label": "Poll period",
-    "name": "extractionNamespace.pollPeriod",
-    "placeholder": "(optional)",
-    "type": "string",
-  },
-  Object {
     "defaultValue": 0,
     "defined": [Function],
     "info": "How long to wait (in ms) for the first run of the cache to populate. 0 indicates to not wait",
     "label": "First cache timeout",
     "name": "firstCacheTimeout",
     "placeholder": "(optional)",
     "type": "number",
   },
   Object {

@@ -29,10 +29,6 @@
     overflow: auto;
   }
 
-  .ace-solarized-dark {
-    background-color: #232c35;
-  }
-
   .ace_gutter-layer {
     background-color: #27313c;
   }

@@ -19,7 +19,7 @@
 import { shallow } from 'enzyme';
 import React from 'react';
 
-import { isLookupSubmitDisabled, LookupEditDialog } from './lookup-edit-dialog';
+import { LookupEditDialog } from './lookup-edit-dialog';
 
 describe('LookupEditDialog', () => {
   it('matches snapshot', () => {

@@ -40,439 +40,3 @@ describe('LookupEditDialog', () => {
     expect(lookupEditDialog).toMatchSnapshot();
   });
 });
-
-describe('Type Map Should be disabled', () => {
-  it('Missing LookupName', () => {
-    expect(isLookupSubmitDisabled(undefined, 'v1', '__default', { type: '' })).toBe(true);
-  });
-
-  it('Empty version', () => {
-    expect(isLookupSubmitDisabled('lookup', '', '__default', { type: '' })).toBe(true);
-  });
-
-  it('Missing version', () => {
-    expect(isLookupSubmitDisabled('lookup', undefined, '__default', { type: '' })).toBe(true);
-  });
-
-  it('Empty tier', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '', { type: '' })).toBe(true);
-  });
-
-  it('Missing tier', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', undefined, { type: '' })).toBe(true);
-  });
-
-  it('Missing spec', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '__default', {})).toBe(true);
-  });
-
-  it('Type undefined', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '__default', { type: undefined })).toBe(true);
-  });
-
-  it('Lookup of type map with no map', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '__default', { type: 'map' })).toBe(true);
-  });
-
-  it('Lookup of type cachedNamespace with no extractionNamespace', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '__default', { type: 'cachedNamespace' })).toBe(
-      true,
-    );
-  });
-
-  it('Lookup of type cachedNamespace with extractionNamespace type uri, format csv, no namespaceParseSpec', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', {
-        type: 'cachedNamespace',
-        extractionNamespace: {
-          type: 'uri',
-          uriPrefix: 's3://bucket/some/key/prefix/',
-          fileRegex: 'renames-[0-9]*\\.gz',
-          pollPeriod: 'PT5M',
-        },
-      }),
-    ).toBe(true);
-  });
-
-  it('Lookup of type cachedNamespace with extractionNamespace type uri, format csv, no columns and skipHeaderRows', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', {
-        type: 'cachedNamespace',
-        extractionNamespace: {
-          type: 'uri',
-          uriPrefix: 's3://bucket/some/key/prefix/',
-          fileRegex: 'renames-[0-9]*\\.gz',
-          namespaceParseSpec: {
-            format: 'csv',
-          },
-          pollPeriod: 'PT5M',
-        },
-      }),
-    ).toBe(true);
-  });
-
-  it('Lookup of type cachedNamespace with extractionNamespace type uri, format tsv, no columns', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', {
-        type: 'cachedNamespace',
-        extractionNamespace: {
-          type: 'uri',
-          uriPrefix: 's3://bucket/some/key/prefix/',
-          fileRegex: 'renames-[0-9]*\\.gz',
-          namespaceParseSpec: {
-            format: 'tsv',
-            skipHeaderRows: 0,
-          },
-          pollPeriod: 'PT5M',
-        },
-      }),
-    ).toBe(true);
-  });
-
-  it('Lookup of type cachedNamespace with extractionNamespace type customJson, format tsv, no keyFieldName', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', {
-        type: 'cachedNamespace',
-        extractionNamespace: {
-          type: 'uri',
-          uriPrefix: 's3://bucket/some/key/prefix/',
-          fileRegex: 'renames-[0-9]*\\.gz',
-          namespaceParseSpec: {
-            format: 'customJson',
-            valueFieldName: 'value',
-          },
-          pollPeriod: 'PT5M',
-        },
-      }),
-    ).toBe(true);
-  });
-
-  it('Lookup of type cachedNamespace with extractionNamespace type customJson, format customJson, no valueFieldName', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', {
-        type: 'cachedNamespace',
-        extractionNamespace: {
-          type: 'uri',
-          uriPrefix: 's3://bucket/some/key/prefix/',
-          fileRegex: 'renames-[0-9]*\\.gz',
-          namespaceParseSpec: {
-            format: 'customJson',
-            keyFieldName: 'key',
-          },
-          pollPeriod: 'PT5M',
-        },
-      }),
-    ).toBe(true);
-  });
-});
-
-describe('Type cachedNamespace should be disabled', () => {
-  it('No extractionNamespace', () => {
-    expect(isLookupSubmitDisabled('lookup', 'v1', '__default', { type: 'cachedNamespace' })).toBe(
-      true,
-    );
-  });
-
-  describe('ExtractionNamespace type URI', () => {
-    it('Format csv, no namespaceParseSpec', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            pollPeriod: 'PT5M',
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('Format csv, no columns and skipHeaderRows', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'csv',
-            },
-            pollPeriod: 'PT5M',
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('Format tsv, no columns', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'tsv',
-              skipHeaderRows: 0,
-            },
-            pollPeriod: 'PT5M',
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('Format tsv, no keyFieldName', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'customJson',
-              valueFieldName: 'value',
-            },
-            pollPeriod: 'PT5M',
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('Format customJson, no valueFieldName', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'customJson',
-              keyFieldName: 'key',
-            },
-            pollPeriod: 'PT5M',
-          },
-        }),
-      ).toBe(true);
-    });
-  });
-
-  describe('ExtractionNamespace type JDBC', () => {
-    it('No namespace', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: undefined,
-            connectorConfig: {
-              createTables: true,
-              connectURI: 'jdbc:mysql://localhost:3306/druid',
-              user: 'druid',
-              password: 'diurd',
-            },
-            table: 'some_lookup_table',
-            keyColumn: 'the_old_dim_value',
-            valueColumn: 'the_new_dim_value',
-            tsColumn: 'timestamp_column',
-            pollPeriod: 600000,
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('No connectorConfig', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: 'some_lookup',
-            connectorConfig: undefined,
-            table: 'some_lookup_table',
-            keyColumn: 'the_old_dim_value',
-            valueColumn: 'the_new_dim_value',
-            tsColumn: 'timestamp_column',
-            pollPeriod: 600000,
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('No table', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: 'some_lookup',
-            connectorConfig: {
-              createTables: true,
-              connectURI: 'jdbc:mysql://localhost:3306/druid',
-              user: 'druid',
-              password: 'diurd',
-            },
-            table: undefined,
-            keyColumn: 'the_old_dim_value',
-            valueColumn: 'the_new_dim_value',
-            tsColumn: 'timestamp_column',
-            pollPeriod: 600000,
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('No keyColumn', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: 'some_lookup',
-            connectorConfig: {
-              createTables: true,
-              connectURI: 'jdbc:mysql://localhost:3306/druid',
-              user: 'druid',
-              password: 'diurd',
-            },
-            table: 'some_lookup_table',
-            keyColumn: undefined,
-            valueColumn: 'the_new_dim_value',
-            tsColumn: 'timestamp_column',
-            pollPeriod: 600000,
-          },
-        }),
-      ).toBe(true);
-    });
-
-    it('No keyColumn', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: 'some_lookup',
-            connectorConfig: {
-              createTables: true,
-              connectURI: 'jdbc:mysql://localhost:3306/druid',
-              user: 'druid',
-              password: 'diurd',
-            },
-            table: 'some_lookup_table',
-            keyColumn: 'the_old_dim_value',
-            valueColumn: undefined,
-            tsColumn: 'timestamp_column',
-            pollPeriod: 600000,
-          },
-        }),
-      ).toBe(true);
-    });
-  });
-});
-
-describe('Type Map Should be enabled', () => {
-  it('Has type and has Map', () => {
-    expect(
-      isLookupSubmitDisabled('lookup', 'v1', '__default', { type: 'map', map: { a: 'b' } }),
-    ).toBe(false);
-  });
-});
-
-describe('Type cachedNamespace Should be enabled', () => {
-  describe('ExtractionNamespace type URI', () => {
-    it('Format csv with columns', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'csv',
-              columns: ['key', 'value'],
-            },
-          },
-        }),
-      ).toBe(false);
-    });
-
-    it('Format csv with skipHeaderRows', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'csv',
-              skipHeaderRows: 1,
-            },
-          },
-        }),
-      ).toBe(false);
-    });
-
-    it('Format tsv, only columns', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'tsv',
-              columns: ['key', 'value'],
-            },
-          },
-        }),
-      ).toBe(false);
-    });
-
-    it('Format tsv, keyFieldName and valueFieldName', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'uri',
-            uriPrefix: 's3://bucket/some/key/prefix/',
-            fileRegex: 'renames-[0-9]*\\.gz',
-            namespaceParseSpec: {
-              format: 'customJson',
-              valueFieldName: 'value',
-              keyFieldName: 'value',
-            },
-          },
-        }),
-      ).toBe(false);
-    });
-  });
-
-  describe('ExtractionNamespace type JDBC', () => {
-    it('No namespace', () => {
-      expect(
-        isLookupSubmitDisabled('lookup', 'v1', '__default', {
-          type: 'cachedNamespace',
-          extractionNamespace: {
-            type: 'jdbc',
-            namespace: 'lookup',
-            connectorConfig: {
-              createTables: true,
-              connectURI: 'jdbc:mysql://localhost:3306/druid',
-              user: 'druid',
-              password: 'diurd',
-            },
-            table: 'some_lookup_table',
-            keyColumn: 'the_old_dim_value',
-            valueColumn: 'the_new_dim_value',
-          },
-        }),
-      ).toBe(false);
-    });
-  });
-});

@ -27,56 +27,12 @@ import {
|
|||
} from '@blueprintjs/core';
|
||||
import React, { useState } from 'react';
|
||||
|
||||
import { AutoForm, Field, JsonInput } from '../../components';
|
||||
import {
|
||||
FormJsonSelector,
|
||||
FormJsonTabs,
|
||||
} from '../../components/form-json-selector/form-json-selector';
|
||||
import { AutoForm, JsonInput } from '../../components';
|
||||
import { FormJsonSelector, FormJsonTabs } from '../../components';
|
||||
import { isLookupInvalid, LOOKUP_FIELDS, LookupSpec } from '../../druid-models';
|
||||
|
||||
import './lookup-edit-dialog.scss';
|
||||
|
||||
export interface ExtractionNamespaceSpec {
  type?: string;
  uri?: string;
  uriPrefix?: string;
  fileRegex?: string;
  namespaceParseSpec?: NamespaceParseSpec;
  namespace?: string;
  connectorConfig?: {
    createTables: boolean;
    connectURI: string;
    user: string;
    password: string;
  };
  table?: string;
  keyColumn?: string;
  valueColumn?: string;
  filter?: any;
  tsColumn?: string;
  pollPeriod?: number | string;
}

export interface NamespaceParseSpec {
  format: string;
  columns?: string[];
  keyColumn?: string;
  valueColumn?: string;
  hasHeaderRow?: boolean;
  skipHeaderRows?: number;
  keyFieldName?: string;
  valueFieldName?: string;
  delimiter?: string;
  listDelimiter?: string;
}

export interface LookupSpec {
  type?: string;
  map?: {};
  extractionNamespace?: ExtractionNamespaceSpec;
  firstCacheTimeout?: number;
  injective?: boolean;
}

export interface LookupEditDialogProps {
  onClose: () => void;
  onSubmit: (updateLookupVersion: boolean) => void;

@@ -89,455 +45,6 @@ export interface LookupEditDialogProps {
  allLookupTiers: string[];
}

export function isLookupSubmitDisabled(
  lookupName: string | undefined,
  lookupVersion: string | undefined,
  lookupTier: string | undefined,
  lookupSpec: LookupSpec | undefined,
) {
  let disableSubmit =
    !lookupName ||
    !lookupVersion ||
    !lookupTier ||
    !lookupSpec ||
    !lookupSpec.type ||
    (lookupSpec.type === 'map' && !lookupSpec.map) ||
    (lookupSpec.type === 'cachedNamespace' && !lookupSpec.extractionNamespace);

  if (
    !disableSubmit &&
    lookupSpec &&
    lookupSpec.type === 'cachedNamespace' &&
    lookupSpec.extractionNamespace
  ) {
    switch (lookupSpec.extractionNamespace.type) {
      case 'uri':
        const namespaceParseSpec = lookupSpec.extractionNamespace.namespaceParseSpec;
        disableSubmit = !namespaceParseSpec;
        if (!namespaceParseSpec) break;
        switch (namespaceParseSpec.format) {
          case 'csv':
            disableSubmit = !namespaceParseSpec.columns && !namespaceParseSpec.skipHeaderRows;
            break;
          case 'tsv':
            disableSubmit = !namespaceParseSpec.columns;
            break;
          case 'customJson':
            disableSubmit = !namespaceParseSpec.keyFieldName || !namespaceParseSpec.valueFieldName;
            break;
        }
        break;
      case 'jdbc':
        const extractionNamespace = lookupSpec.extractionNamespace;
        disableSubmit =
          !extractionNamespace.namespace ||
          !extractionNamespace.connectorConfig ||
          !extractionNamespace.table ||
          !extractionNamespace.keyColumn ||
          !extractionNamespace.valueColumn;
        break;
    }
  }
  return disableSubmit;
}

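For context, isLookupSubmitDisabled returns true whenever any required piece of the lookup is missing, which is exactly what the tests above exercise. A minimal usage sketch (the lookup name and map values here are hypothetical, for illustration only):

// Hypothetical values; mirrors how the dialog gates its submit button.
const ok = isLookupSubmitDisabled('country_lookup', 'v1', '__default', {
  type: 'map',
  map: { US: 'United States' },
});
// ok === false: name, version, tier, and a non-empty map are all present.

const missingMap = isLookupSubmitDisabled('country_lookup', 'v1', '__default', {
  type: 'map',
});
// missingMap === true: a 'map' lookup without a map cannot be submitted.
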
const LOOKUP_FIELDS: Field<LookupSpec>[] = [
  {
    name: 'type',
    type: 'string',
    suggestions: ['map', 'cachedNamespace'],
    adjustment: (model: LookupSpec) => {
      if (model.type === 'map' && model.extractionNamespace && model.extractionNamespace.type) {
        return model;
      }
      model.extractionNamespace = { type: 'uri', namespaceParseSpec: { format: 'csv' } };
      return model;
    },
  },
  {
    name: 'map',
    type: 'json',
    height: '60vh',
    defined: (model: LookupSpec) => model.type === 'map',
  },
  {
    name: 'extractionNamespace.type',
    type: 'string',
    label: 'Globally cached lookup type',
    placeholder: 'uri',
    suggestions: ['uri', 'jdbc'],
    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
  },
  {
    name: 'extractionNamespace.uriPrefix',
    type: 'string',
    label: 'URI prefix',
    info:
      'A URI which specifies a directory (or other searchable resource) in which to search for files',
    placeholder: 's3://bucket/some/key/prefix/',
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'uri',
  },
  {
    name: 'extractionNamespace.fileRegex',
    type: 'string',
    label: 'File regex',
    placeholder: '(optional)',
    info:
      'Optional regex for matching the file name under uriPrefix. Only used if uriPrefix is used',
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'uri',
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.format',
    type: 'string',
    label: 'Format',
    defaultValue: 'csv',
    suggestions: ['csv', 'tsv', 'customJson', 'simpleJson'],
    defined: (model: LookupSpec) =>
      Boolean(
        model.type === 'cachedNamespace' &&
          model.extractionNamespace &&
          model.extractionNamespace.type === 'uri',
      ),
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.columns',
    type: 'string-array',
    label: 'Columns',
    placeholder: `["key", "value"]`,
    info: 'The list of columns in the csv file',
    defined: (model: LookupSpec) =>
      Boolean(
        model.type === 'cachedNamespace' &&
          model.extractionNamespace &&
          model.extractionNamespace.type === 'uri' &&
          model.extractionNamespace.namespaceParseSpec &&
          (model.extractionNamespace.namespaceParseSpec.format === 'csv' ||
            model.extractionNamespace.namespaceParseSpec.format === 'tsv'),
      ),
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.keyColumn',
    type: 'string',
    label: 'Key column',
    placeholder: 'Key',
    info: 'The name of the column containing the key',
    defined: (model: LookupSpec) =>
      Boolean(
        model.type === 'cachedNamespace' &&
          model.extractionNamespace &&
          model.extractionNamespace.type === 'uri' &&
          model.extractionNamespace.namespaceParseSpec &&
          (model.extractionNamespace.namespaceParseSpec.format === 'csv' ||
            model.extractionNamespace.namespaceParseSpec.format === 'tsv'),
      ),
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.valueColumn',
    type: 'string',
    label: 'Value column',
    placeholder: 'Value',
    info: 'The name of the column containing the value',
    defined: (model: LookupSpec) =>
      Boolean(
        model.type === 'cachedNamespace' &&
          model.extractionNamespace &&
          model.extractionNamespace.type === 'uri' &&
          model.extractionNamespace.namespaceParseSpec &&
          (model.extractionNamespace.namespaceParseSpec.format === 'csv' ||
            model.extractionNamespace.namespaceParseSpec.format === 'tsv'),
      ),
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.hasHeaderRow',
    type: 'boolean',
    label: 'Has header row',
    defaultValue: false,
    info: `A flag to indicate that column information can be extracted from the input files' header row`,
    defined: (model: LookupSpec) =>
      Boolean(
        model.type === 'cachedNamespace' &&
          model.extractionNamespace &&
          model.extractionNamespace.type === 'uri' &&
          model.extractionNamespace.namespaceParseSpec &&
          (model.extractionNamespace.namespaceParseSpec.format === 'csv' ||
            model.extractionNamespace.namespaceParseSpec.format === 'tsv'),
      ),
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.skipHeaderRows',
    type: 'number',
    label: 'Skip header rows',
    placeholder: '(optional)',
    info: `Number of header rows to be skipped. The default number of header rows to be skipped is 0.`,
    defined: (model: LookupSpec) =>
      Boolean(
        model.type === 'cachedNamespace' &&
          model.extractionNamespace &&
          model.extractionNamespace.type === 'uri' &&
          model.extractionNamespace.namespaceParseSpec &&
          (model.extractionNamespace.namespaceParseSpec.format === 'csv' ||
            model.extractionNamespace.namespaceParseSpec.format === 'tsv'),
      ),
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.delimiter',
    type: 'string',
    label: 'Delimiter',
    placeholder: `(optional)`,
    defined: (model: LookupSpec) =>
      Boolean(
        model.type === 'cachedNamespace' &&
          model.extractionNamespace &&
          model.extractionNamespace.type === 'uri' &&
          model.extractionNamespace.namespaceParseSpec &&
          model.extractionNamespace.namespaceParseSpec.format === 'tsv',
      ),
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.listDelimiter',
    type: 'string',
    label: 'List delimiter',
    placeholder: `(optional)`,
    defined: (model: LookupSpec) =>
      Boolean(
        model.type === 'cachedNamespace' &&
          model.extractionNamespace &&
          model.extractionNamespace.type === 'uri' &&
          model.extractionNamespace.namespaceParseSpec &&
          model.extractionNamespace.namespaceParseSpec.format === 'tsv',
      ),
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.keyFieldName',
    type: 'string',
    label: 'Key field name',
    placeholder: `key`,
    defined: (model: LookupSpec) =>
      Boolean(
        model.type === 'cachedNamespace' &&
          model.extractionNamespace &&
          model.extractionNamespace.type === 'uri' &&
          model.extractionNamespace.namespaceParseSpec &&
          model.extractionNamespace.namespaceParseSpec.format === 'customJson',
      ),
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.valueFieldName',
    type: 'string',
    label: 'Value field name',
    placeholder: `value`,
    defined: (model: LookupSpec) =>
      Boolean(
        model.type === 'cachedNamespace' &&
          model.extractionNamespace &&
          model.extractionNamespace.type === 'uri' &&
          model.extractionNamespace.namespaceParseSpec &&
          model.extractionNamespace.namespaceParseSpec.format === 'customJson',
      ),
  },
  {
    name: 'extractionNamespace.namespace',
    type: 'string',
    label: 'Namespace',
    placeholder: 'some_lookup',
    info: (
      <>
        <p>The namespace value in the SQL query:</p>
        <p>
          SELECT keyColumn, valueColumn, tsColumn? FROM <strong>namespace</strong>.table WHERE
          filter
        </p>
      </>
    ),
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'jdbc',
  },
  {
    name: 'extractionNamespace.connectorConfig.createTables',
    type: 'boolean',
    label: 'CreateTables',
    info: 'Defines the createTables value on the connector config to be used',
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'jdbc',
  },
  {
    name: 'extractionNamespace.connectorConfig.connectURI',
    type: 'string',
    label: 'Connect URI',
    info: 'Defines the connectURI value on the connector config to be used',
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'jdbc',
  },
  {
    name: 'extractionNamespace.connectorConfig.user',
    type: 'string',
    label: 'User',
    info: 'Defines the user to be used by the connector config',
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'jdbc',
  },
  {
    name: 'extractionNamespace.connectorConfig.password',
    type: 'string',
    label: 'Password',
    info: 'Defines the password to be used by the connector config',
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'jdbc',
  },
  {
    name: 'extractionNamespace.table',
    type: 'string',
    label: 'Table',
    placeholder: 'some_lookup_table',
    info: (
      <>
        <p>
          The table which contains the key value pairs. This will become the table value in the SQL
          query:
        </p>
        <p>
          SELECT keyColumn, valueColumn, tsColumn? FROM namespace.<strong>table</strong> WHERE
          filter
        </p>
      </>
    ),
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'jdbc',
  },
  {
    name: 'extractionNamespace.keyColumn',
    type: 'string',
    label: 'Key column',
    placeholder: 'my_key_value',
    info: (
      <>
        <p>
          The column in the table which contains the keys. This will become the keyColumn value in
          the SQL query:
        </p>
        <p>
          SELECT <strong>keyColumn</strong>, valueColumn, tsColumn? FROM namespace.table WHERE
          filter
        </p>
      </>
    ),
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'jdbc',
  },
  {
    name: 'extractionNamespace.valueColumn',
    type: 'string',
    label: 'Value column',
    placeholder: 'my_column_value',
    info: (
      <>
        <p>
          The column in the table which contains the values. This will become the valueColumn value
          in the SQL query:
        </p>
        <p>
          SELECT keyColumn, <strong>valueColumn</strong>, tsColumn? FROM namespace.table WHERE
          filter
        </p>
      </>
    ),
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'jdbc',
  },
  {
    name: 'extractionNamespace.filter',
    type: 'string',
    label: 'Filter',
    placeholder: '(optional)',
    info: (
      <>
        <p>
          The filter to be used when selecting lookups; it is used to create a WHERE clause on
          lookup population. This will become the expression filter in the SQL query:
        </p>
        <p>
          SELECT keyColumn, valueColumn, tsColumn? FROM namespace.table WHERE{' '}
          <strong>filter</strong>
        </p>
      </>
    ),
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'jdbc',
  },
  {
    name: 'extractionNamespace.tsColumn',
    type: 'string',
    label: 'TsColumn',
    placeholder: '(optional)',
    info: (
      <>
        <p>
          The column in the table which contains when the key was updated. This will become the
          tsColumn value in the SQL query:
        </p>
        <p>
          SELECT keyColumn, valueColumn, <strong>tsColumn</strong>? FROM namespace.table WHERE
          filter
        </p>
      </>
    ),
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'jdbc',
  },
  {
    name: 'extractionNamespace.pollPeriod',
    type: 'string',
    label: 'Poll period',
    placeholder: '(optional)',
    info: `Period between polling for updates`,
    defined: (model: LookupSpec) =>
      model.type === 'cachedNamespace' &&
      !!model.extractionNamespace &&
      model.extractionNamespace.type === 'uri',
  },
  {
    name: 'firstCacheTimeout',
    type: 'number',
    label: 'First cache timeout',
    placeholder: '(optional)',
    info: `How long to wait (in ms) for the first run of the cache to populate. 0 indicates to not wait`,
    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
  },
  {
    name: 'injective',
    type: 'boolean',
    defaultValue: false,
    info: `If the underlying map is injective (keys and values are unique) then optimizations can occur internally by setting this to true`,
    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
  },
];

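Each entry above follows the same Field<LookupSpec> shape: `defined` decides whether the control is rendered for the current model, and `adjustment` lets a field rewrite the model when it changes. A minimal sketch of the pattern; this exampleField is illustrative, not part of the dialog:

// Illustrative only: a field that shows/hides itself based on the model,
// exactly like the entries in LOOKUP_FIELDS above.
const exampleField: Field<LookupSpec> = {
  name: 'extractionNamespace.pollPeriod',
  type: 'string',
  label: 'Poll period',
  // Rendered only when the lookup is a cachedNamespace backed by a URI.
  defined: (model: LookupSpec) =>
    model.type === 'cachedNamespace' &&
    !!model.extractionNamespace &&
    model.extractionNamespace.type === 'uri',
};
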
export const LookupEditDialog = React.memo(function LookupEditDialog(props: LookupEditDialogProps) {
  const {
    onClose,

@@ -565,6 +72,7 @@ export const LookupEditDialog = React.memo(function LookupEditDialog(props: Look
        <InputGroup
          value={lookupName}
          onChange={(e: any) => onChange('name', e.target.value)}
          intent={lookupName ? Intent.NONE : Intent.PRIMARY}
          disabled={isEdit}
          placeholder="Enter the lookup name"
        />

@@ -631,7 +139,7 @@ export const LookupEditDialog = React.memo(function LookupEditDialog(props: Look
          onClick={() => {
            onSubmit(updateVersionOnSubmit && isEdit);
          }}
          disabled={isLookupSubmitDisabled(lookupName, lookupVersion, lookupTier, lookupSpec)}
          disabled={isLookupInvalid(lookupName, lookupVersion, lookupTier, lookupSpec)}
        />
      </div>
    </div>

@@ -11,7 +11,7 @@ exports[`overlord dynamic config matches snapshot 1`] = `
      Edit the overlord dynamic configuration on the fly. For more information please refer to the
      <Memo(ExternalLink)
        href="https://druid.apache.org/docs/0.19.0/configuration/index.html#overlord-dynamic-configuration"
        href="https://druid.apache.org/docs/0.20.0/configuration/index.html#overlord-dynamic-configuration"
      >
        documentation
      </Memo(ExternalLink)>

@@ -52,7 +52,7 @@ export const OverlordDynamicConfigDialog = React.memo(function OverlordDynamicCo
    processQuery: async () => {
      try {
        const configResp = await axios(`/druid/indexer/v1/worker`);
        setDynamicConfig(configResp.data);
        setDynamicConfig(configResp.data || {});
      } catch (e) {
        AppToaster.show({
          icon: IconNames.ERROR,

@@ -58,7 +58,7 @@ exports[`retention dialog matches snapshot 1`] = `
      Druid uses rules to determine what data should be retained in the cluster. The rules are evaluated in order from top to bottom. For more information please refer to the
      <a
        href="https://druid.apache.org/docs/0.19.0/operations/rule-configuration.html"
        href="https://druid.apache.org/docs/0.20.0/operations/rule-configuration.html"
        rel="noopener noreferrer"
        target="_blank"
      >

@@ -25,9 +25,5 @@
  .spec-dialog-textarea {
    background-color: #232c35;
    margin-bottom: 10px;

    .ace-solarized-dark {
      background-color: #232c35;
    }
  }
}

@@ -21,8 +21,8 @@ import React, { useState } from 'react';
import { ShowJson } from '../../components';
import { ShowHistory } from '../../components/show-history/show-history';
import { SupervisorStatisticsTable } from '../../components/supervisor-statistics-table/supervisor-statistics-table';
import { deepGet } from '../../utils';
import { BasicAction } from '../../utils/basic-action';
import { deepGet } from '../../utils/object-change';
import { SideButtonMetaData, TableActionDialog } from '../table-action-dialog/table-action-dialog';

interface SupervisorTableActionDialogProps {

@@ -19,8 +19,8 @@
import React, { useState } from 'react';

import { ShowJson, ShowLog } from '../../components';
import { deepGet } from '../../utils';
import { BasicAction } from '../../utils/basic-action';
import { deepGet } from '../../utils/object-change';
import { SideButtonMetaData, TableActionDialog } from '../table-action-dialog/table-action-dialog';

interface TaskTableActionDialogProps {

@@ -0,0 +1,232 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { Code } from '@blueprintjs/core';
import React from 'react';

import { Field } from '../components';
import { deepGet, deepSet, oneOf } from '../utils';

export type CompactionConfig = Record<string, any>;

export const COMPACTION_CONFIG_FIELDS: Field<CompactionConfig>[] = [
  {
    name: 'skipOffsetFromLatest',
    type: 'string',
    defaultValue: 'P1D',
    suggestions: ['PT0H', 'PT1H', 'P1D', 'P3D'],
    info: (
      <p>
        The offset for searching segments to be compacted. It is strongly recommended to set this
        for realtime dataSources.
      </p>
    ),
  },
  {
    name: 'tuningConfig.partitionsSpec.type',
    label: 'Partitioning type',
    type: 'string',
    suggestions: ['dynamic', 'hashed', 'single_dim'],
    info: (
      <p>
        For perfect rollup, you should use either <Code>hashed</Code> (partitioning based on the
        hash of dimensions in each row) or <Code>single_dim</Code> (based on ranges of a single
        dimension). For best-effort rollup, you should use <Code>dynamic</Code>.
      </p>
    ),
  },
  // partitionsSpec type: dynamic
  {
    name: 'tuningConfig.partitionsSpec.maxRowsPerSegment',
    label: 'Max rows per segment',
    type: 'number',
    defaultValue: 5000000,
    defined: (t: CompactionConfig) => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'dynamic',
    info: <>Determines how many rows are in each segment.</>,
  },
  {
    name: 'tuningConfig.partitionsSpec.maxTotalRows',
    label: 'Max total rows',
    type: 'number',
    defaultValue: 20000000,
    defined: (t: CompactionConfig) => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'dynamic',
    info: <>Total number of rows in segments waiting to be pushed.</>,
  },
  // partitionsSpec type: hashed
  {
    name: 'tuningConfig.partitionsSpec.targetRowsPerSegment',
    label: 'Target rows per segment',
    type: 'number',
    zeroMeansUndefined: true,
    defined: (t: CompactionConfig) =>
      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
      !deepGet(t, 'tuningConfig.partitionsSpec.numShards'),
    info: (
      <>
        <p>
          If the segments generated are a sub-optimal size for the requested partition dimensions,
          consider setting this field.
        </p>
        <p>
          A target row count for each partition. Each partition will have a row count close to the
          target assuming evenly distributed keys. Defaults to 5 million if numShards is null.
        </p>
      </>
    ),
  },
  {
    name: 'tuningConfig.partitionsSpec.numShards',
    label: 'Num shards',
    type: 'number',
    zeroMeansUndefined: true,
    defined: (t: CompactionConfig) =>
      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed' &&
      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
    info: (
      <>
        <p>
          If you know the optimal number of shards and want to speed up the time it takes for
          compaction to run, set this field.
        </p>
        <p>
          Directly specify the number of shards to create. If this is specified and 'intervals' is
          specified in the granularitySpec, the index task can skip the determine
          intervals/partitions pass through the data.
        </p>
      </>
    ),
  },
  {
    name: 'tuningConfig.partitionsSpec.partitionDimensions',
    label: 'Partition dimensions',
    type: 'string-array',
    placeholder: '(all dimensions)',
    defined: (t: CompactionConfig) => deepGet(t, 'tuningConfig.partitionsSpec.type') === 'hashed',
    info: <p>The dimensions to partition on. Leave blank to select all dimensions.</p>,
  },
  // partitionsSpec type: single_dim
  {
    name: 'tuningConfig.partitionsSpec.partitionDimension',
    label: 'Partition dimension',
    type: 'string',
    defined: (t: CompactionConfig) =>
      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim',
    required: true,
    info: <p>The dimension to partition on.</p>,
  },
  {
    name: 'tuningConfig.partitionsSpec.targetRowsPerSegment',
    label: 'Target rows per segment',
    type: 'number',
    zeroMeansUndefined: true,
    defined: (t: CompactionConfig) =>
      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim' &&
      !deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
    required: (t: CompactionConfig) =>
      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment') &&
      !deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
    info: (
      <p>
        Target number of rows to include in a partition; it should be a number that targets
        segments of 500MB~1GB.
      </p>
    ),
  },
  {
    name: 'tuningConfig.partitionsSpec.maxRowsPerSegment',
    label: 'Max rows per segment',
    type: 'number',
    zeroMeansUndefined: true,
    defined: (t: CompactionConfig) =>
      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim' &&
      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment'),
    required: (t: CompactionConfig) =>
      !deepGet(t, 'tuningConfig.partitionsSpec.targetRowsPerSegment') &&
      !deepGet(t, 'tuningConfig.partitionsSpec.maxRowsPerSegment'),
    info: <p>Maximum number of rows to include in a partition.</p>,
  },
  {
    name: 'tuningConfig.partitionsSpec.assumeGrouped',
    label: 'Assume grouped',
    type: 'boolean',
    defaultValue: false,
    defined: (t: CompactionConfig) =>
      deepGet(t, 'tuningConfig.partitionsSpec.type') === 'single_dim',
    info: (
      <p>
        Assume that input data has already been grouped on time and dimensions. Ingestion will run
        faster, but may choose sub-optimal partitions if this assumption is violated.
      </p>
    ),
  },
  {
    name: 'tuningConfig.maxNumConcurrentSubTasks',
    label: 'Max num concurrent sub tasks',
    type: 'number',
    defaultValue: 1,
    min: 1,
    info: (
      <>
        Maximum number of tasks which can be run at the same time. The supervisor task would spawn
        worker tasks up to maxNumConcurrentSubTasks regardless of the available task slots. If this
        value is set to 1, the supervisor task processes data ingestion on its own instead of
        spawning worker tasks. If this value is set too large, too many worker tasks can be
        created, which might block other ingestion.
      </>
    ),
  },
  {
    name: 'inputSegmentSizeBytes',
    type: 'number',
    defaultValue: 419430400,
    info: (
      <p>
        Maximum number of total segment bytes processed per compaction task. Since a time chunk must
        be processed in its entirety, if the segments for a particular time chunk have a total size
        in bytes greater than this parameter, compaction will not run for that time chunk. Because
        each compaction task runs with a single thread, setting this value too far above 1–2GB will
        result in compaction tasks taking an excessive amount of time.
      </p>
    ),
  },
  {
    name: 'tuningConfig.totalNumMergeTasks',
    label: 'Total num merge tasks',
    type: 'number',
    defaultValue: 10,
    min: 1,
    defined: (t: CompactionConfig) =>
      oneOf(deepGet(t, 'tuningConfig.partitionsSpec.type'), 'hashed', 'single_dim'),
    info: <>Maximum number of merge tasks which can be run at the same time.</>,
  },
  {
    name: 'tuningConfig.splitHintSpec.maxInputSegmentBytesPerTask',
    label: 'Max input segment bytes per task',
    type: 'number',
    defaultValue: 500000000,
    min: 1000000,
    adjustment: (t: CompactionConfig) => deepSet(t, 'tuningConfig.splitHintSpec.type', 'segments'),
    info: (
      <>
        Maximum number of bytes of input segments to process in a single task. If a single segment
        is larger than this number, it will be processed by itself in a single task (input segments
        are never split across tasks).
      </>
    ),
  },
];

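Putting the fields above together, a compaction config using hashed partitioning might look like the following sketch. The datasource name and sizes are invented for illustration, and the exact top-level shape of a compaction config is assumed here, not taken from this diff:

// Illustrative only; CompactionConfig is Record<string, any>.
const exampleCompactionConfig: CompactionConfig = {
  dataSource: 'wikipedia',           // hypothetical datasource
  skipOffsetFromLatest: 'P1D',       // leave the most recent day uncompacted
  inputSegmentSizeBytes: 419430400,
  tuningConfig: {
    partitionsSpec: {
      type: 'hashed',                // perfect rollup
      targetRowsPerSegment: 5000000, // mutually exclusive with numShards
    },
    maxNumConcurrentSubTasks: 2,
  },
};
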
@@ -16,14 +16,14 @@
 * limitations under the License.
 */

import { CompactionConfig } from './compaction-config';
import {
  CompactionConfig,
  CompactionStatus,
  formatCompactionConfigAndStatus,
  zeroCompactionStatus,
} from './compaction';
} from './compaction-status';

describe('compaction', () => {
describe('compaction status', () => {
  const BASIC_CONFIG: CompactionConfig = {};
  const ZERO_STATUS: CompactionStatus = {
    dataSource: 'tbl',

@@ -16,6 +16,8 @@
 * limitations under the License.
 */

import { CompactionConfig } from './compaction-config';

function capitalizeFirst(str: string): string {
  return str.slice(0, 1).toUpperCase() + str.slice(1).toLowerCase();
}

@@ -34,8 +36,6 @@ export interface CompactionStatus {
  intervalCountSkipped: number;
}

export type CompactionConfig = Record<string, any>;

export function zeroCompactionStatus(compactionStatus: CompactionStatus): boolean {
  return (
    !compactionStatus.bytesAwaitingCompaction &&

@@ -0,0 +1,29 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { getDimensionSpecs } from './dimension-spec';

describe('dimension-spec', () => {
  it('getDimensionSpecs', () => {
    expect(getDimensionSpecs({ header: ['header'], rows: [] }, {}, true)).toMatchInlineSnapshot(`
      Array [
        "header",
      ]
    `);
  });
});

@@ -0,0 +1,84 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { Field } from '../components';
import { filterMap } from '../utils';
import { HeaderAndRows } from '../utils/sampler';

import { getColumnTypeFromHeaderAndRows } from './ingestion-spec';

export interface DimensionsSpec {
  dimensions?: (string | DimensionSpec)[];
  dimensionExclusions?: string[];
  spatialDimensions?: any[];
}

export interface DimensionSpec {
  type: string;
  name: string;
  createBitmapIndex?: boolean;
}

export const DIMENSION_SPEC_FIELDS: Field<DimensionSpec>[] = [
  {
    name: 'name',
    type: 'string',
  },
  {
    name: 'type',
    type: 'string',
    suggestions: ['string', 'long', 'float', 'double'],
  },
  {
    name: 'createBitmapIndex',
    type: 'boolean',
    defaultValue: true,
    defined: (dimensionSpec: DimensionSpec) => dimensionSpec.type === 'string',
  },
];

export function getDimensionSpecName(dimensionSpec: string | DimensionSpec): string {
  return typeof dimensionSpec === 'string' ? dimensionSpec : dimensionSpec.name;
}

export function getDimensionSpecType(dimensionSpec: string | DimensionSpec): string {
  return typeof dimensionSpec === 'string' ? 'string' : dimensionSpec.type;
}

export function inflateDimensionSpec(dimensionSpec: string | DimensionSpec): DimensionSpec {
  return typeof dimensionSpec === 'string'
    ? { name: dimensionSpec, type: 'string' }
    : dimensionSpec;
}

export function getDimensionSpecs(
  headerAndRows: HeaderAndRows,
  typeHints: Record<string, string>,
  hasRollup: boolean,
): (string | DimensionSpec)[] {
  return filterMap(headerAndRows.header, h => {
    if (h === '__time') return;
    const type = typeHints[h] || getColumnTypeFromHeaderAndRows(headerAndRows, h);
    if (type === 'string') return h;
    if (hasRollup) return;
    return {
      type,
      name: h,
    };
  });
}

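As a concrete illustration of getDimensionSpecs: string columns come back as bare names, while numeric columns come back as full DimensionSpec objects unless rollup is enabled. The sample data below is invented:

const sample = { header: ['__time', 'channel', 'added'], rows: [] }; // invented sample
const typeHints = { channel: 'string', added: 'long' };

getDimensionSpecs(sample, typeHints, false);
// => ['channel', { type: 'long', name: 'added' }]  ('__time' is always skipped)

getDimensionSpecs(sample, typeHints, true);
// => ['channel']  (with rollup, numeric columns become metrics, not dimensions)
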
@@ -0,0 +1,121 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { Field } from '../components';
import { deepGet, EMPTY_ARRAY, oneOf } from '../utils';

export type DruidFilter = Record<string, any>;

export interface DimensionFiltersWithRest {
  dimensionFilters: DruidFilter[];
  restFilter?: DruidFilter;
}

export function splitFilter(filter: DruidFilter | null): DimensionFiltersWithRest {
  const inputAndFilters: DruidFilter[] = filter
    ? filter.type === 'and' && Array.isArray(filter.fields)
      ? filter.fields
      : [filter]
    : EMPTY_ARRAY;
  const dimensionFilters: DruidFilter[] = inputAndFilters.filter(
    f => typeof f.dimension === 'string',
  );
  const restFilters: DruidFilter[] = inputAndFilters.filter(f => typeof f.dimension !== 'string');

  return {
    dimensionFilters,
    restFilter: restFilters.length
      ? restFilters.length > 1
        ? // Druid 'and' filters combine their sub-filters under 'fields'.
          { type: 'and', fields: restFilters }
        : restFilters[0]
      : undefined,
  };
}

export function joinFilter(
  dimensionFiltersWithRest: DimensionFiltersWithRest,
): DruidFilter | undefined {
  const { dimensionFilters, restFilter } = dimensionFiltersWithRest;
  let newFields = dimensionFilters || EMPTY_ARRAY;
  if (restFilter && restFilter.type) newFields = newFields.concat([restFilter]);

  if (!newFields.length) return;
  if (newFields.length === 1) return newFields[0];
  return { type: 'and', fields: newFields };
}

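The two helpers are designed to round-trip: splitFilter pulls the per-dimension clauses out of a top-level 'and' so the UI can edit them individually, and joinFilter reassembles them. A small sketch with invented filter values:

const original: DruidFilter = {
  type: 'and',
  fields: [
    { type: 'selector', dimension: 'channel', value: '#en.wikipedia' },
    { type: 'expression', expression: 'added > 0' }, // no string 'dimension' key
  ],
};

const { dimensionFilters, restFilter } = splitFilter(original);
// dimensionFilters: just the selector on 'channel'
// restFilter: the expression filter, since it has no string 'dimension'

joinFilter({ dimensionFilters, restFilter });
// => an 'and' filter equivalent to the original
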
export const FILTER_FIELDS: Field<DruidFilter>[] = [
  {
    name: 'type',
    type: 'string',
    suggestions: ['selector', 'in', 'regex', 'like', 'not'],
  },
  {
    name: 'dimension',
    type: 'string',
    defined: (df: DruidFilter) => oneOf(df.type, 'selector', 'in', 'regex', 'like'),
  },
  {
    name: 'value',
    type: 'string',
    defined: (df: DruidFilter) => df.type === 'selector',
  },
  {
    name: 'values',
    type: 'string-array',
    defined: (df: DruidFilter) => df.type === 'in',
  },
  {
    name: 'pattern',
    type: 'string',
    defined: (df: DruidFilter) => oneOf(df.type, 'regex', 'like'),
  },

  {
    name: 'field.type',
    label: 'Sub-filter type',
    type: 'string',
    suggestions: ['selector', 'in', 'regex', 'like'],
    defined: (df: DruidFilter) => df.type === 'not',
  },
  {
    name: 'field.dimension',
    label: 'Sub-filter dimension',
    type: 'string',
    defined: (df: DruidFilter) => df.type === 'not',
  },
  {
    name: 'field.value',
    label: 'Sub-filter value',
    type: 'string',
    defined: (df: DruidFilter) => df.type === 'not' && deepGet(df, 'field.type') === 'selector',
  },
  {
    name: 'field.values',
    label: 'Sub-filter values',
    type: 'string-array',
    defined: (df: DruidFilter) => df.type === 'not' && deepGet(df, 'field.type') === 'in',
  },
  {
    name: 'field.pattern',
    label: 'Sub-filter pattern',
    type: 'string',
    defined: (df: DruidFilter) =>
      df.type === 'not' && oneOf(deepGet(df, 'field.type'), 'regex', 'like'),
  },
];

@@ -16,7 +16,7 @@
 * limitations under the License.
 */

import { computeFlattenExprsForData } from './spec-utils';
import { computeFlattenExprsForData } from './flatten-spec';

describe('spec-utils', () => {
  describe('computeFlattenExprsForData', () => {

@@ -16,7 +16,50 @@
 * limitations under the License.
 */

import { FlattenField } from './ingestion-spec';
import React from 'react';

import { ExternalLink, Field } from '../components';
import { getLink } from '../links';
import { oneOf } from '../utils';

export interface FlattenSpec {
  useFieldDiscovery?: boolean;
  fields?: FlattenField[];
}

export interface FlattenField {
  name: string;
  type: string;
  expr: string;
}

export const FLATTEN_FIELD_FIELDS: Field<FlattenField>[] = [
  {
    name: 'name',
    type: 'string',
    placeholder: 'column_name',
    required: true,
  },
  {
    name: 'type',
    type: 'string',
    suggestions: ['path', 'jq', 'root'],
    required: true,
  },
  {
    name: 'expr',
    type: 'string',
    placeholder: '$.thing',
    defined: (flattenField: FlattenField) => oneOf(flattenField.type, 'path', 'jq'),
    required: true,
    info: (
      <>
        Specify a flatten{' '}
        <ExternalLink href={`${getLink('DOCS')}/ingestion/flatten-json`}>expression</ExternalLink>.
      </>
    ),
  },
];

export type ExprType = 'path' | 'jq';
export type ArrayHandling = 'ignore-arrays' | 'include-arrays';

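For example, a path-type flatten field that pulls a nested JSON value into a top-level column could look like this; the names are invented:

const exampleFlattenField: FlattenField = {
  name: 'userCity',            // hypothetical output column
  type: 'path',
  expr: '$.user.address.city', // JSONPath into the nested record
};
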
@@ -0,0 +1,31 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

export * from './compaction-config';
export * from './compaction-status';
export * from './lookup-spec';
export * from './time';
export * from './timestamp-spec';
export * from './transform-spec';
export * from './input-source';
export * from './input-format';
export * from './flatten-spec';
export * from './filter';
export * from './dimension-spec';
export * from './metric-spec';
export * from './ingestion-spec';

@@ -16,7 +16,16 @@
 * limitations under the License.
 */

import { cleanSpec, downgradeSpec, guessInputFormat, upgradeSpec } from './ingestion-spec';
import {
  cleanSpec,
  downgradeSpec,
  getColumnTypeFromHeaderAndRows,
  guessInputFormat,
  guessTypeFromSample,
  IngestionSpec,
  updateSchemaWithSample,
  upgradeSpec,
} from './ingestion-spec';

describe('ingestion-spec', () => {
  const oldSpec = {

@@ -152,3 +161,98 @@ describe('ingestion-spec', () => {
    });
  });
});

describe('spec utils', () => {
  const ingestionSpec: IngestionSpec = {
    type: 'index_parallel',
    spec: {
      ioConfig: {
        type: 'index_parallel',
        inputSource: {
          type: 'http',
          uris: ['https://static.imply.io/data/wikipedia.json.gz'],
        },
        inputFormat: {
          type: 'json',
        },
      },
      tuningConfig: {
        type: 'index_parallel',
      },
      dataSchema: {
        dataSource: 'wikipedia',
        granularitySpec: {
          type: 'uniform',
          segmentGranularity: 'DAY',
          queryGranularity: 'HOUR',
        },
        timestampSpec: {
          column: 'timestamp',
          format: 'iso',
        },
        dimensionsSpec: {},
      },
    },
  };

  it('guessTypeFromSample', () => {
    expect(guessTypeFromSample([])).toMatchInlineSnapshot(`"string"`);
  });

  it('getColumnTypeFromHeaderAndRows', () => {
    expect(
      getColumnTypeFromHeaderAndRows({ header: ['header'], rows: [] }, 'header'),
    ).toMatchInlineSnapshot(`"string"`);
  });

  it('updateSchemaWithSample', () => {
    expect(
      updateSchemaWithSample(ingestionSpec, { header: ['header'], rows: [] }, 'specific', true),
    ).toMatchInlineSnapshot(`
      Object {
        "spec": Object {
          "dataSchema": Object {
            "dataSource": "wikipedia",
            "dimensionsSpec": Object {
              "dimensions": Array [
                "header",
              ],
            },
            "granularitySpec": Object {
              "queryGranularity": "HOUR",
              "rollup": true,
              "segmentGranularity": "DAY",
              "type": "uniform",
            },
            "metricsSpec": Array [
              Object {
                "name": "count",
                "type": "count",
              },
            ],
            "timestampSpec": Object {
              "column": "timestamp",
              "format": "iso",
            },
          },
          "ioConfig": Object {
            "inputFormat": Object {
              "type": "json",
            },
            "inputSource": Object {
              "type": "http",
              "uris": Array [
                "https://static.imply.io/data/wikipedia.json.gz",
              ],
            },
            "type": "index_parallel",
          },
          "tuningConfig": Object {
            "type": "index_parallel",
          },
        },
        "type": "index_parallel",
      }
    `);
  });
});

File diff suppressed because it is too large

@@ -0,0 +1,131 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import React from 'react';

import { AutoForm, ExternalLink, Field } from '../components';
import { getLink } from '../links';
import { oneOf } from '../utils';

import { FlattenSpec } from './flatten-spec';

export interface InputFormat {
  type: string;
  findColumnsFromHeader?: boolean;
  skipHeaderRows?: number;
  columns?: string[];
  listDelimiter?: string;
  pattern?: string;
  function?: string;
  flattenSpec?: FlattenSpec;
  keepNullColumns?: boolean;
}

export const INPUT_FORMAT_FIELDS: Field<InputFormat>[] = [
  {
    name: 'type',
    label: 'Input format',
    type: 'string',
    suggestions: ['json', 'csv', 'tsv', 'regex', 'parquet', 'orc', 'avro_ocf'],
    required: true,
    info: (
      <>
        <p>The parser used to parse the data.</p>
        <p>
          For more information see{' '}
          <ExternalLink href={`${getLink('DOCS')}/ingestion/data-formats.html`}>
            the documentation
          </ExternalLink>
          .
        </p>
      </>
    ),
  },
  {
    name: 'pattern',
    type: 'string',
    required: true,
    defined: (p: InputFormat) => p.type === 'regex',
  },
  {
    name: 'function',
    type: 'string',
    required: true,
    defined: (p: InputFormat) => p.type === 'javascript',
  },
  {
    name: 'findColumnsFromHeader',
    type: 'boolean',
    required: true,
    defined: (p: InputFormat) => oneOf(p.type, 'csv', 'tsv'),
  },
  {
    name: 'skipHeaderRows',
    type: 'number',
    defaultValue: 0,
    defined: (p: InputFormat) => oneOf(p.type, 'csv', 'tsv'),
    min: 0,
    info: (
      <>
        If both skipHeaderRows and hasHeaderRow options are set, skipHeaderRows is first applied.
        For example, if you set skipHeaderRows to 2 and hasHeaderRow to true, Druid will skip the
        first two lines and then extract column information from the third line.
      </>
    ),
  },
  {
    name: 'columns',
    type: 'string-array',
    required: true,
    defined: (p: InputFormat) =>
      (oneOf(p.type, 'csv', 'tsv') && !p.findColumnsFromHeader) || p.type === 'regex',
  },
  {
    name: 'delimiter',
    type: 'string',
    defaultValue: '\t',
    defined: (p: InputFormat) => p.type === 'tsv',
    info: <>A custom delimiter for data values.</>,
  },
  {
    name: 'listDelimiter',
    type: 'string',
    defined: (p: InputFormat) => oneOf(p.type, 'csv', 'tsv', 'regex'),
    info: <>A custom delimiter for multi-value dimensions.</>,
  },
  {
    name: 'binaryAsString',
    type: 'boolean',
    defaultValue: false,
    defined: (p: InputFormat) => oneOf(p.type, 'parquet', 'orc', 'avro_ocf'),
    info: (
      <>
        Specifies if the binary column which is not logically marked as a string should be treated
        as a UTF-8 encoded string.
      </>
    ),
  },
];

export function issueWithInputFormat(inputFormat: InputFormat | undefined): string | undefined {
  return AutoForm.issueWithModel(inputFormat, INPUT_FORMAT_FIELDS);
}

export function inputFormatCanFlatten(inputFormat: InputFormat): boolean {
  return oneOf(inputFormat.type, 'json', 'parquet', 'orc', 'avro_ocf');
}

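In use, the data loader can call issueWithInputFormat before sampling: a non-undefined return is a human-readable description of the first problem AutoForm finds against the fields above. A sketch with invented spec values:

issueWithInputFormat({ type: 'csv', findColumnsFromHeader: true });
// expected: undefined, since a csv format reading its columns from the header is complete

inputFormatCanFlatten({ type: 'csv' });  // => false: flat formats cannot flatten
inputFormatCanFlatten({ type: 'json' }); // => true: nested formats can
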
@@ -0,0 +1,89 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

function nonEmptyArray(a: any) {
  return Array.isArray(a) && Boolean(a.length);
}

export interface InputSource {
  type: string;
  baseDir?: string;
  filter?: any;
  uris?: string[];
  prefixes?: string[];
  objects?: { bucket: string; path: string }[];
  fetchTimeout?: number;

  // druid
  dataSource?: string;
  interval?: string;
  dimensions?: string[];
  metrics?: string[];
  maxInputSegmentBytesPerTask?: number;

  // inline
  data?: string;

  // hdfs
  paths?: string;
}

export function issueWithInputSource(inputSource: InputSource | undefined): string | undefined {
  if (!inputSource) return 'does not exist';
  if (!inputSource.type) return 'missing a type';
  switch (inputSource.type) {
    case 'local':
      if (!inputSource.baseDir) return `must have a 'baseDir'`;
      if (!inputSource.filter) return `must have a 'filter'`;
      break;

    case 'http':
      if (!nonEmptyArray(inputSource.uris)) {
        return 'must have at least one uri';
      }
      break;

    case 'druid':
      if (!inputSource.dataSource) return `must have a 'dataSource'`;
      if (!inputSource.interval) return `must have an 'interval'`;
      break;

    case 'inline':
      if (!inputSource.data) return `must have 'data'`;
      break;

    case 's3':
    case 'azure':
    case 'google':
      if (
        !nonEmptyArray(inputSource.uris) &&
        !nonEmptyArray(inputSource.prefixes) &&
        !nonEmptyArray(inputSource.objects)
      ) {
        return 'must have at least one uri or prefix or object';
      }
      break;

    case 'hdfs':
      if (!inputSource.paths) {
        return 'must have paths';
      }
      break;
  }
  return;
}

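issueWithInputSource follows the same convention, returning a message only when something is wrong. For instance, with invented URIs:

issueWithInputSource({ type: 'http', uris: [] });
// => 'must have at least one uri'

issueWithInputSource({ type: 'http', uris: ['https://example.com/data.json'] });
// => undefined (valid)
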
@@ -0,0 +1,453 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { isLookupInvalid } from './lookup-spec';

describe('lookup-spec', () => {
  describe('Type Map Should be disabled', () => {
    it('Missing LookupName', () => {
      expect(isLookupInvalid(undefined, 'v1', '__default', { type: '' })).toBe(true);
    });

    it('Empty version', () => {
      expect(isLookupInvalid('lookup', '', '__default', { type: '' })).toBe(true);
    });

    it('Missing version', () => {
      expect(isLookupInvalid('lookup', undefined, '__default', { type: '' })).toBe(true);
    });

    it('Empty tier', () => {
      expect(isLookupInvalid('lookup', 'v1', '', { type: '' })).toBe(true);
    });

    it('Missing tier', () => {
      expect(isLookupInvalid('lookup', 'v1', undefined, { type: '' })).toBe(true);
    });

    it('Missing spec', () => {
      expect(isLookupInvalid('lookup', 'v1', '__default', {})).toBe(true);
    });

    it('Type undefined', () => {
      expect(isLookupInvalid('lookup', 'v1', '__default', { type: undefined })).toBe(true);
    });

    it('Lookup of type map with no map', () => {
      expect(isLookupInvalid('lookup', 'v1', '__default', { type: 'map' })).toBe(true);
    });

    it('Lookup of type cachedNamespace with no extractionNamespace', () => {
      expect(isLookupInvalid('lookup', 'v1', '__default', { type: 'cachedNamespace' })).toBe(true);
    });

    it('Lookup of type cachedNamespace with extractionNamespace type uri, format csv, no namespaceParseSpec', () => {
      expect(
        isLookupInvalid('lookup', 'v1', '__default', {
          type: 'cachedNamespace',
          extractionNamespace: {
            type: 'uri',
            uriPrefix: 's3://bucket/some/key/prefix/',
            fileRegex: 'renames-[0-9]*\\.gz',
            pollPeriod: 'PT5M',
          },
        }),
      ).toBe(true);
    });

    it('Lookup of type cachedNamespace with extractionNamespace type uri, format csv, no columns and no hasHeaderRow', () => {
      expect(
        isLookupInvalid('lookup', 'v1', '__default', {
          type: 'cachedNamespace',
          extractionNamespace: {
            type: 'uri',
            uriPrefix: 's3://bucket/some/key/prefix/',
            fileRegex: 'renames-[0-9]*\\.gz',
            namespaceParseSpec: {
              format: 'csv',
            },
            pollPeriod: 'PT5M',
          },
        }),
      ).toBe(true);
    });

    it('Lookup of type cachedNamespace with extractionNamespace type uri, format tsv, no columns', () => {
      expect(
        isLookupInvalid('lookup', 'v1', '__default', {
          type: 'cachedNamespace',
          extractionNamespace: {
            type: 'uri',
            uriPrefix: 's3://bucket/some/key/prefix/',
            fileRegex: 'renames-[0-9]*\\.gz',
            namespaceParseSpec: {
              format: 'tsv',
              skipHeaderRows: 0,
            },
            pollPeriod: 'PT5M',
          },
        }),
      ).toBe(true);
    });

    it('Lookup of type cachedNamespace with extractionNamespace type uri, format customJson, no keyFieldName', () => {
      expect(
        isLookupInvalid('lookup', 'v1', '__default', {
          type: 'cachedNamespace',
          extractionNamespace: {
            type: 'uri',
            uriPrefix: 's3://bucket/some/key/prefix/',
            fileRegex: 'renames-[0-9]*\\.gz',
            namespaceParseSpec: {
              format: 'customJson',
              valueFieldName: 'value',
            },
            pollPeriod: 'PT5M',
          },
        }),
      ).toBe(true);
    });

    it('Lookup of type cachedNamespace with extractionNamespace type uri, format customJson, no valueFieldName', () => {
      expect(
        isLookupInvalid('lookup', 'v1', '__default', {
          type: 'cachedNamespace',
          extractionNamespace: {
            type: 'uri',
            uriPrefix: 's3://bucket/some/key/prefix/',
            fileRegex: 'renames-[0-9]*\\.gz',
            namespaceParseSpec: {
              format: 'customJson',
              keyFieldName: 'key',
            },
            pollPeriod: 'PT5M',
          },
        }),
      ).toBe(true);
    });
  });

  describe('Type cachedNamespace should be disabled', () => {
    it('No extractionNamespace', () => {
      expect(isLookupInvalid('lookup', 'v1', '__default', { type: 'cachedNamespace' })).toBe(true);
    });

    describe('ExtractionNamespace type URI', () => {
      it('Format csv, no namespaceParseSpec', () => {
        expect(
          isLookupInvalid('lookup', 'v1', '__default', {
            type: 'cachedNamespace',
            extractionNamespace: {
              type: 'uri',
              uriPrefix: 's3://bucket/some/key/prefix/',
              fileRegex: 'renames-[0-9]*\\.gz',
              pollPeriod: 'PT5M',
            },
          }),
        ).toBe(true);
      });

      it('Format csv, no columns and skipHeaderRows', () => {
        expect(
          isLookupInvalid('lookup', 'v1', '__default', {
            type: 'cachedNamespace',
            extractionNamespace: {
              type: 'uri',
              uriPrefix: 's3://bucket/some/key/prefix/',
              fileRegex: 'renames-[0-9]*\\.gz',
              namespaceParseSpec: {
                format: 'csv',
              },
              pollPeriod: 'PT5M',
            },
          }),
        ).toBe(true);
      });

      it('Format tsv, no columns', () => {
        expect(
          isLookupInvalid('lookup', 'v1', '__default', {
            type: 'cachedNamespace',
            extractionNamespace: {
              type: 'uri',
              uriPrefix: 's3://bucket/some/key/prefix/',
              fileRegex: 'renames-[0-9]*\\.gz',
              namespaceParseSpec: {
                format: 'tsv',
                skipHeaderRows: 0,
              },
              pollPeriod: 'PT5M',
            },
          }),
        ).toBe(true);
      });

      it('Format customJson, no keyFieldName', () => {
        expect(
          isLookupInvalid('lookup', 'v1', '__default', {
            type: 'cachedNamespace',
            extractionNamespace: {
              type: 'uri',
              uriPrefix: 's3://bucket/some/key/prefix/',
              fileRegex: 'renames-[0-9]*\\.gz',
              namespaceParseSpec: {
                format: 'customJson',
                valueFieldName: 'value',
              },
              pollPeriod: 'PT5M',
            },
          }),
        ).toBe(true);
      });

      it('Format customJson, no valueFieldName', () => {
        expect(
          isLookupInvalid('lookup', 'v1', '__default', {
            type: 'cachedNamespace',
            extractionNamespace: {
              type: 'uri',
              uriPrefix: 's3://bucket/some/key/prefix/',
              fileRegex: 'renames-[0-9]*\\.gz',
              namespaceParseSpec: {
                format: 'customJson',
                keyFieldName: 'key',
              },
              pollPeriod: 'PT5M',
            },
          }),
        ).toBe(true);
      });
    });

    describe('ExtractionNamespace type JDBC', () => {
      it('No namespace', () => {
        expect(
          isLookupInvalid('lookup', 'v1', '__default', {
            type: 'cachedNamespace',
            extractionNamespace: {
              type: 'jdbc',
              namespace: undefined,
              connectorConfig: {
                createTables: true,
                connectURI: 'jdbc:mysql://localhost:3306/druid',
                user: 'druid',
                password: 'diurd',
              },
              table: 'some_lookup_table',
              keyColumn: 'the_old_dim_value',
              valueColumn: 'the_new_dim_value',
              tsColumn: 'timestamp_column',
              pollPeriod: 600000,
            },
          }),
        ).toBe(true);
      });

      it('No connectorConfig', () => {
        expect(
          isLookupInvalid('lookup', 'v1', '__default', {
            type: 'cachedNamespace',
            extractionNamespace: {
              type: 'jdbc',
              namespace: 'some_lookup',
              connectorConfig: undefined,
              table: 'some_lookup_table',
              keyColumn: 'the_old_dim_value',
              valueColumn: 'the_new_dim_value',
              tsColumn: 'timestamp_column',
|
||||
pollPeriod: 600000,
|
||||
},
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('No table', () => {
|
||||
expect(
|
||||
isLookupInvalid('lookup', 'v1', '__default', {
|
||||
type: 'cachedNamespace',
|
||||
extractionNamespace: {
|
||||
type: 'jdbc',
|
||||
namespace: 'some_lookup',
|
||||
connectorConfig: {
|
||||
createTables: true,
|
||||
connectURI: 'jdbc:mysql://localhost:3306/druid',
|
||||
user: 'druid',
|
||||
password: 'diurd',
|
||||
},
|
||||
table: undefined,
|
||||
keyColumn: 'the_old_dim_value',
|
||||
valueColumn: 'the_new_dim_value',
|
||||
tsColumn: 'timestamp_column',
|
||||
pollPeriod: 600000,
|
||||
},
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('No keyColumn', () => {
|
||||
expect(
|
||||
isLookupInvalid('lookup', 'v1', '__default', {
|
||||
type: 'cachedNamespace',
|
||||
extractionNamespace: {
|
||||
type: 'jdbc',
|
||||
namespace: 'some_lookup',
|
||||
connectorConfig: {
|
||||
createTables: true,
|
||||
connectURI: 'jdbc:mysql://localhost:3306/druid',
|
||||
user: 'druid',
|
||||
password: 'diurd',
|
||||
},
|
||||
table: 'some_lookup_table',
|
||||
keyColumn: undefined,
|
||||
valueColumn: 'the_new_dim_value',
|
||||
tsColumn: 'timestamp_column',
|
||||
pollPeriod: 600000,
|
||||
},
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
it('No keyColumn', () => {
|
||||
expect(
|
||||
isLookupInvalid('lookup', 'v1', '__default', {
|
||||
type: 'cachedNamespace',
|
||||
extractionNamespace: {
|
||||
type: 'jdbc',
|
||||
namespace: 'some_lookup',
|
||||
connectorConfig: {
|
||||
createTables: true,
|
||||
connectURI: 'jdbc:mysql://localhost:3306/druid',
|
||||
user: 'druid',
|
||||
password: 'diurd',
|
||||
},
|
||||
table: 'some_lookup_table',
|
||||
keyColumn: 'the_old_dim_value',
|
||||
valueColumn: undefined,
|
||||
tsColumn: 'timestamp_column',
|
||||
pollPeriod: 600000,
|
||||
},
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Type Map Should be enabled', () => {
|
||||
it('Has type and has Map', () => {
|
||||
expect(isLookupInvalid('lookup', 'v1', '__default', { type: 'map', map: { a: 'b' } })).toBe(
|
||||
false,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Type cachedNamespace Should be enabled', () => {
|
||||
describe('ExtractionNamespace type URI', () => {
|
||||
it('Format csv with columns', () => {
|
||||
expect(
|
||||
isLookupInvalid('lookup', 'v1', '__default', {
|
||||
type: 'cachedNamespace',
|
||||
extractionNamespace: {
|
||||
type: 'uri',
|
||||
uriPrefix: 's3://bucket/some/key/prefix/',
|
||||
fileRegex: 'renames-[0-9]*\\.gz',
|
||||
namespaceParseSpec: {
|
||||
format: 'csv',
|
||||
columns: ['key', 'value'],
|
||||
},
|
||||
},
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it('Format csv with hasHeaderRow', () => {
|
||||
expect(
|
||||
isLookupInvalid('lookup', 'v1', '__default', {
|
||||
type: 'cachedNamespace',
|
||||
extractionNamespace: {
|
||||
type: 'uri',
|
||||
uriPrefix: 's3://bucket/some/key/prefix/',
|
||||
fileRegex: 'renames-[0-9]*\\.gz',
|
||||
namespaceParseSpec: {
|
||||
format: 'csv',
|
||||
hasHeaderRow: true,
|
||||
},
|
||||
},
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it('Format tsv, only columns', () => {
|
||||
expect(
|
||||
isLookupInvalid('lookup', 'v1', '__default', {
|
||||
type: 'cachedNamespace',
|
||||
extractionNamespace: {
|
||||
type: 'uri',
|
||||
uriPrefix: 's3://bucket/some/key/prefix/',
|
||||
fileRegex: 'renames-[0-9]*\\.gz',
|
||||
namespaceParseSpec: {
|
||||
format: 'tsv',
|
||||
columns: ['key', 'value'],
|
||||
},
|
||||
},
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it('Format tsv, keyFieldName and valueFieldName', () => {
|
||||
expect(
|
||||
isLookupInvalid('lookup', 'v1', '__default', {
|
||||
type: 'cachedNamespace',
|
||||
extractionNamespace: {
|
||||
type: 'uri',
|
||||
uriPrefix: 's3://bucket/some/key/prefix/',
|
||||
fileRegex: 'renames-[0-9]*\\.gz',
|
||||
namespaceParseSpec: {
|
||||
format: 'customJson',
|
||||
valueFieldName: 'value',
|
||||
keyFieldName: 'value',
|
||||
},
|
||||
},
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('ExtractionNamespace type JDBC', () => {
|
||||
it('No namespace', () => {
|
||||
expect(
|
||||
isLookupInvalid('lookup', 'v1', '__default', {
|
||||
type: 'cachedNamespace',
|
||||
extractionNamespace: {
|
||||
type: 'jdbc',
|
||||
namespace: 'lookup',
|
||||
connectorConfig: {
|
||||
createTables: true,
|
||||
connectURI: 'jdbc:mysql://localhost:3306/druid',
|
||||
user: 'druid',
|
||||
password: 'diurd',
|
||||
},
|
||||
table: 'some_lookup_table',
|
||||
keyColumn: 'the_old_dim_value',
|
||||
valueColumn: 'the_new_dim_value',
|
||||
},
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,456 @@
|
|||
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { Code } from '@blueprintjs/core';
import React from 'react';

import { AutoForm, Field } from '../components';
import { deepGet, deepSet, oneOf } from '../utils';

export interface ExtractionNamespaceSpec {
  type?: string;
  uri?: string;
  uriPrefix?: string;
  fileRegex?: string;
  namespaceParseSpec?: NamespaceParseSpec;
  namespace?: string;
  connectorConfig?: {
    createTables: boolean;
    connectURI: string;
    user: string;
    password: string;
  };
  table?: string;
  keyColumn?: string;
  valueColumn?: string;
  filter?: any;
  tsColumn?: string;
  pollPeriod?: number | string;
}

export interface NamespaceParseSpec {
  format: string;
  columns?: string[];
  keyColumn?: string;
  valueColumn?: string;
  hasHeaderRow?: boolean;
  skipHeaderRows?: number;
  keyFieldName?: string;
  valueFieldName?: string;
  delimiter?: string;
  listDelimiter?: string;
}

export interface LookupSpec {
  type?: string;
  map?: Record<string, string | number>;
  extractionNamespace?: ExtractionNamespaceSpec;
  firstCacheTimeout?: number;
  injective?: boolean;
}

export const LOOKUP_FIELDS: Field<LookupSpec>[] = [
  {
    name: 'type',
    type: 'string',
    suggestions: ['map', 'cachedNamespace'],
    required: true,
    adjustment: (model: LookupSpec) => {
      if (model.type === 'map' && !model.map) {
        return deepSet(model, 'map', {});
      }
      if (model.type === 'cachedNamespace' && !deepGet(model, 'extractionNamespace.type')) {
        return deepSet(model, 'extractionNamespace', { type: 'uri' });
      }
      return model;
    },
  },

  // map lookups are simple
  {
    name: 'map',
    type: 'json',
    height: '60vh',
    defined: (model: LookupSpec) => model.type === 'map',
    required: true,
    issueWithValue: value => {
      if (!value) return 'map must be defined';
      if (typeof value !== 'object') return `map must be an object`;
      for (const k in value) {
        const typeValue = typeof value[k];
        if (typeValue !== 'string' && typeValue !== 'number') {
          return `map key '${k}' is of the wrong type '${typeValue}'`;
        }
      }
      return;
    },
  },

  // cachedNamespace lookups have more options
  {
    name: 'extractionNamespace.type',
    type: 'string',
    label: 'Globally cached lookup type',
    placeholder: 'uri',
    suggestions: ['uri', 'jdbc'],
    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
    required: true,
  },
  {
    name: 'extractionNamespace.uriPrefix',
    type: 'string',
    label: 'URI prefix',
    placeholder: 's3://bucket/some/key/prefix/',
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      !deepGet(model, 'extractionNamespace.uri'),
    required: (model: LookupSpec) =>
      !deepGet(model, 'extractionNamespace.uriPrefix') &&
      !deepGet(model, 'extractionNamespace.uri'),
    info:
      'A URI which specifies a directory (or other searchable resource) in which to search for files',
  },
  {
    name: 'extractionNamespace.uri',
    type: 'string',
    label: 'URI (deprecated)',
    placeholder: 's3://bucket/some/key/prefix/lookups-01.gz',
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      !deepGet(model, 'extractionNamespace.uriPrefix'),
    required: (model: LookupSpec) =>
      !deepGet(model, 'extractionNamespace.uriPrefix') &&
      !deepGet(model, 'extractionNamespace.uri'),
    info: (
      <>
        <p>URI for the file of interest, specified as a file, hdfs, or s3 path</p>
        <p>The URI prefix option is strictly better than URI and should be used instead</p>
      </>
    ),
  },
  {
    name: 'extractionNamespace.fileRegex',
    type: 'string',
    label: 'File regex',
    defaultValue: '.*',
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      Boolean(deepGet(model, 'extractionNamespace.uriPrefix')),
    info: 'Optional regex for matching the file name under uriPrefix.',
  },

  // namespaceParseSpec
  {
    name: 'extractionNamespace.namespaceParseSpec.format',
    type: 'string',
    label: 'Parse format',
    suggestions: ['csv', 'tsv', 'simpleJson', 'customJson'],
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'uri',
    required: true,
    info: (
      <>
        <p>The format of the data in the lookup files.</p>
        <p>
          The <Code>simpleJson</Code> lookupParseSpec does not take any parameters. It is simply a
          line delimited JSON file where the field is the key, and the field's value is the value.
        </p>
      </>
    ),
  },

  // CSV + TSV
  {
    name: 'extractionNamespace.namespaceParseSpec.skipHeaderRows',
    type: 'number',
    label: 'Skip header rows',
    defaultValue: 0,
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      oneOf(deepGet(model, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
    info: `Number of header rows to be skipped.`,
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.hasHeaderRow',
    type: 'boolean',
    label: 'Has header row',
    defaultValue: false,
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      oneOf(deepGet(model, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
    info: `A flag to indicate that column information can be extracted from the input files' header row`,
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.columns',
    type: 'string-array',
    label: 'Columns',
    placeholder: `["key", "value"]`,
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      oneOf(deepGet(model, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
    required: (model: LookupSpec) =>
      !deepGet(model, 'extractionNamespace.namespaceParseSpec.hasHeaderRow'),
    info: 'The list of columns in the csv file',
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.keyColumn',
    type: 'string',
    label: 'Key column',
    placeholder: '(optional - defaults to the first column)',
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      oneOf(deepGet(model, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
    info: 'The name of the column containing the key',
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.valueColumn',
    type: 'string',
    label: 'Value column',
    placeholder: '(optional - defaults to the second column)',
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      oneOf(deepGet(model, 'extractionNamespace.namespaceParseSpec.format'), 'csv', 'tsv'),
    info: 'The name of the column containing the value',
  },

  // TSV only
  {
    name: 'extractionNamespace.namespaceParseSpec.delimiter',
    type: 'string',
    label: 'Delimiter',
    placeholder: `(optional)`,
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      deepGet(model, 'extractionNamespace.namespaceParseSpec.format') === 'tsv',
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.listDelimiter',
    type: 'string',
    label: 'List delimiter',
    placeholder: `(optional)`,
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      deepGet(model, 'extractionNamespace.namespaceParseSpec.format') === 'tsv',
  },

  // Custom JSON
  {
    name: 'extractionNamespace.namespaceParseSpec.keyFieldName',
    type: 'string',
    label: 'Key field name',
    placeholder: `key`,
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      deepGet(model, 'extractionNamespace.namespaceParseSpec.format') === 'customJson',
    required: true,
  },
  {
    name: 'extractionNamespace.namespaceParseSpec.valueFieldName',
    type: 'string',
    label: 'Value field name',
    placeholder: `value`,
    defined: (model: LookupSpec) =>
      deepGet(model, 'extractionNamespace.type') === 'uri' &&
      deepGet(model, 'extractionNamespace.namespaceParseSpec.format') === 'customJson',
    required: true,
  },
  {
    name: 'extractionNamespace.pollPeriod',
    type: 'string',
    label: 'Poll period',
    defaultValue: '0',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'uri',
    info: `Period between polling for updates`,
  },

  // JDBC stuff
  {
    name: 'extractionNamespace.namespace',
    type: 'string',
    label: 'Namespace',
    placeholder: 'some_lookup',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
    required: true,
    info: (
      <>
        <p>The namespace value in the SQL query:</p>
        <p>
          SELECT keyColumn, valueColumn, tsColumn? FROM <strong>namespace</strong>.table WHERE
          filter
        </p>
      </>
    ),
  },
  {
    name: 'extractionNamespace.connectorConfig.connectURI',
    type: 'string',
    label: 'Connect URI',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
    required: true,
    info: 'Defines the connectURI value of the connector config to be used',
  },
  {
    name: 'extractionNamespace.connectorConfig.user',
    type: 'string',
    label: 'User',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
    info: 'Defines the user to be used by the connector config',
  },
  {
    name: 'extractionNamespace.connectorConfig.password',
    type: 'string',
    label: 'Password',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
    info: 'Defines the password to be used by the connector config',
  },
  {
    name: 'extractionNamespace.connectorConfig.createTables',
    type: 'boolean',
    label: 'Create tables',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
    info: 'Should tables be created',
  },
  {
    name: 'extractionNamespace.table',
    type: 'string',
    label: 'Table',
    placeholder: 'some_lookup_table',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
    required: true,
    info: (
      <>
        <p>
          The table which contains the key value pairs. This will become the table value in the SQL
          query:
        </p>
        <p>
          SELECT keyColumn, valueColumn, tsColumn? FROM namespace.<strong>table</strong> WHERE
          filter
        </p>
      </>
    ),
  },
  {
    name: 'extractionNamespace.keyColumn',
    type: 'string',
    label: 'Key column',
    placeholder: 'my_key_value',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
    required: true,
    info: (
      <>
        <p>
          The column in the table which contains the keys. This will become the keyColumn value in
          the SQL query:
        </p>
        <p>
          SELECT <strong>keyColumn</strong>, valueColumn, tsColumn? FROM namespace.table WHERE
          filter
        </p>
      </>
    ),
  },
  {
    name: 'extractionNamespace.valueColumn',
    type: 'string',
    label: 'Value column',
    placeholder: 'my_column_value',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
    required: true,
    info: (
      <>
        <p>
          The column in the table which contains the values. This will become the valueColumn value
          in the SQL query:
        </p>
        <p>
          SELECT keyColumn, <strong>valueColumn</strong>, tsColumn? FROM namespace.table WHERE
          filter
        </p>
      </>
    ),
  },
  {
    name: 'extractionNamespace.filter',
    type: 'string',
    label: 'Filter',
    placeholder: '(optional)',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
    info: (
      <>
        <p>
          The filter to be used when selecting lookups; it is used to create a WHERE clause on
          lookup population. This will become the filter expression in the SQL query:
        </p>
        <p>
          SELECT keyColumn, valueColumn, tsColumn? FROM namespace.table WHERE{' '}
          <strong>filter</strong>
        </p>
      </>
    ),
  },
  {
    name: 'extractionNamespace.tsColumn',
    type: 'string',
    label: 'Timestamp column',
    placeholder: '(optional)',
    defined: (model: LookupSpec) => deepGet(model, 'extractionNamespace.type') === 'jdbc',
    info: (
      <>
        <p>
          The column in the table which contains when the key was updated. This will become the
          tsColumn value in the SQL query:
        </p>
        <p>
          SELECT keyColumn, valueColumn, <strong>tsColumn</strong>? FROM namespace.table WHERE
          filter
        </p>
      </>
    ),
  },

  // Extra cachedNamespace things
  {
    name: 'firstCacheTimeout',
    type: 'number',
    label: 'First cache timeout',
    defaultValue: 0,
    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
    info: `How long to wait (in ms) for the first run of the cache to populate. 0 indicates not to wait`,
  },
  {
    name: 'injective',
    type: 'boolean',
    defaultValue: false,
    defined: (model: LookupSpec) => model.type === 'cachedNamespace',
    info: `If the underlying map is injective (keys and values are unique) then optimizations can occur internally by setting this to true`,
  },
];

export function isLookupInvalid(
  lookupName: string | undefined,
  lookupVersion: string | undefined,
  lookupTier: string | undefined,
  lookupSpec: LookupSpec | undefined,
) {
  return (
    !lookupName ||
    !lookupVersion ||
    !lookupTier ||
    Boolean(AutoForm.issueWithModel(lookupSpec, LOOKUP_FIELDS))
  );
}
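As an aside (not part of the commit), here is a minimal usage sketch of the validation above; the lookup name, version, and tier values are hypothetical:

// A well-formed map lookup passes validation, so a dialog's save button can be enabled.
const spec: LookupSpec = { type: 'map', map: { us: 'United States' } };
isLookupInvalid('country_lookup', 'v1', '__default', spec); // => false

// Dropping the map (or the tier) flips the result.
isLookupInvalid('country_lookup', 'v1', '__default', { type: 'map' }); // => true
isLookupInvalid('country_lookup', 'v1', undefined, spec); // => true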
@@ -0,0 +1,32 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { getMetricSpecs } from './metric-spec';

describe('metric-spec', () => {
  it('getMetricSpecs', () => {
    expect(getMetricSpecs({ header: ['header'], rows: [] }, {})).toMatchInlineSnapshot(`
      Array [
        Object {
          "name": "count",
          "type": "count",
        },
      ]
    `);
  });
});
@@ -0,0 +1,347 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { Code } from '@blueprintjs/core';
import React from 'react';

import { ExternalLink, Field } from '../components';
import { getLink } from '../links';
import { filterMap, oneOf } from '../utils';
import { HeaderAndRows } from '../utils/sampler';

import { getColumnTypeFromHeaderAndRows } from './ingestion-spec';

export interface MetricSpec {
  type: string;
  name?: string;
  fieldName?: string;
  maxStringBytes?: number;
  filterNullValues?: boolean;
  fieldNames?: string[];
  fnAggregate?: string;
  fnCombine?: string;
  fnReset?: string;
  fields?: string[];
  byRow?: boolean;
  round?: boolean;
  isInputHyperUnique?: boolean;
  filter?: any;
  aggregator?: MetricSpec;
}

export const METRIC_SPEC_FIELDS: Field<MetricSpec>[] = [
  {
    name: 'name',
    type: 'string',
    info: <>The metric name as it will appear in Druid.</>,
  },
  {
    name: 'type',
    type: 'string',
    suggestions: [
      'count',
      {
        group: 'sum',
        suggestions: ['longSum', 'doubleSum', 'floatSum'],
      },
      {
        group: 'min',
        suggestions: ['longMin', 'doubleMin', 'floatMin'],
      },
      {
        group: 'max',
        suggestions: ['longMax', 'doubleMax', 'floatMax'],
      },
      {
        group: 'first',
        suggestions: ['longFirst', 'doubleFirst', 'floatFirst'],
      },
      {
        group: 'last',
        suggestions: ['longLast', 'doubleLast', 'floatLast'],
      },
      'thetaSketch',
      {
        group: 'HLLSketch',
        suggestions: ['HLLSketchBuild', 'HLLSketchMerge'],
      },
      'quantilesDoublesSketch',
      'momentSketch',
      'fixedBucketsHistogram',
      'hyperUnique',
      'filtered',
    ],
    info: <>The aggregation function to apply.</>,
  },
  {
    name: 'fieldName',
    type: 'string',
    defined: m => m.type !== 'filtered',
    info: <>The column name for the aggregator to operate on.</>,
  },
  {
    name: 'maxStringBytes',
    type: 'number',
    defaultValue: 1024,
    defined: m => {
      return oneOf(m.type, 'stringFirst', 'stringLast');
    },
  },
  {
    name: 'filterNullValues',
    type: 'boolean',
    defaultValue: false,
    defined: m => {
      return oneOf(m.type, 'stringFirst', 'stringLast');
    },
  },
  // filtered
  {
    name: 'filter',
    type: 'json',
    defined: m => m.type === 'filtered',
  },
  {
    name: 'aggregator',
    type: 'json',
    defined: m => m.type === 'filtered',
  },
  // thetaSketch
  {
    name: 'size',
    type: 'number',
    defined: m => m.type === 'thetaSketch',
    defaultValue: 16384,
    info: (
      <>
        <p>
          Must be a power of 2. Internally, size refers to the maximum number of entries the sketch
          object will retain. A higher size means higher accuracy but more space to store sketches.
          Note that after you index with a particular size, Druid will persist the sketch in
          segments, and at query time you must use a size greater than or equal to it.
        </p>
        <p>
          See the{' '}
          <ExternalLink href="https://datasketches.apache.org/docs/Theta/ThetaSize.html">
            DataSketches site
          </ExternalLink>{' '}
          for details.
        </p>
        <p>In general, we recommend sticking to the default size.</p>
      </>
    ),
  },
  {
    name: 'isInputThetaSketch',
    type: 'boolean',
    defined: m => m.type === 'thetaSketch',
    defaultValue: false,
    info: (
      <>
        This should only be used at indexing time if your input data contains theta sketch objects.
        This would be the case if you use the DataSketches library outside of Druid, say with
        Pig/Hive, to produce the data that you are ingesting into Druid.
      </>
    ),
  },
  // HLLSketchBuild & HLLSketchMerge
  {
    name: 'lgK',
    type: 'number',
    defined: m => oneOf(m.type, 'HLLSketchBuild', 'HLLSketchMerge'),
    defaultValue: 12,
    info: (
      <>
        <p>
          log2 of K, which is the number of buckets in the sketch; this parameter controls the size
          and the accuracy.
        </p>
        <p>Must be between 4 and 21, inclusive.</p>
      </>
    ),
  },
  {
    name: 'tgtHllType',
    type: 'string',
    defined: m => oneOf(m.type, 'HLLSketchBuild', 'HLLSketchMerge'),
    defaultValue: 'HLL_4',
    suggestions: ['HLL_4', 'HLL_6', 'HLL_8'],
    info: (
      <>
        The type of the target HLL sketch. Must be <Code>HLL_4</Code>, <Code>HLL_6</Code>, or{' '}
        <Code>HLL_8</Code>.
      </>
    ),
  },
  // quantilesDoublesSketch
  {
    name: 'k',
    type: 'number',
    defined: m => m.type === 'quantilesDoublesSketch',
    defaultValue: 128,
    info: (
      <>
        <p>
          Parameter that determines the accuracy and size of the sketch. A higher k means higher
          accuracy but more space to store sketches.
        </p>
        <p>
          Must be a power of 2 from 2 to 32768. See the{' '}
          <ExternalLink href="https://datasketches.apache.org/docs/Quantiles/QuantilesAccuracy.html">
            Quantiles Accuracy
          </ExternalLink>{' '}
          guide for details.
        </p>
      </>
    ),
  },
  // momentSketch
  {
    name: 'k',
    type: 'number',
    defined: m => m.type === 'momentSketch',
    required: true,
    info: (
      <>
        Parameter that determines the accuracy and size of the sketch. A higher k means higher
        accuracy but more space to store sketches. The usable range is generally [3, 15].
      </>
    ),
  },
  {
    name: 'compress',
    type: 'boolean',
    defined: m => m.type === 'momentSketch',
    defaultValue: true,
    info: (
      <>
        Flag for whether the aggregator compresses numeric values using arcsinh. Can improve
        robustness to skewed and long-tailed distributions, but reduces accuracy slightly on more
        uniform distributions.
      </>
    ),
  },
  // fixedBucketsHistogram
  {
    name: 'lowerLimit',
    type: 'number',
    defined: m => m.type === 'fixedBucketsHistogram',
    required: true,
    info: <>Lower limit of the histogram.</>,
  },
  {
    name: 'upperLimit',
    type: 'number',
    defined: m => m.type === 'fixedBucketsHistogram',
    required: true,
    info: <>Upper limit of the histogram.</>,
  },
  {
    name: 'numBuckets',
    type: 'number',
    defined: m => m.type === 'fixedBucketsHistogram',
    defaultValue: 10,
    required: true,
    info: (
      <>
        Number of buckets for the histogram. The range <Code>[lowerLimit, upperLimit]</Code> will be
        divided into <Code>numBuckets</Code> intervals of equal size.
      </>
    ),
  },
  {
    name: 'outlierHandlingMode',
    type: 'string',
    defined: m => m.type === 'fixedBucketsHistogram',
    required: true,
    suggestions: ['ignore', 'overflow', 'clip'],
    info: (
      <>
        <p>
          Specifies how values outside of <Code>[lowerLimit, upperLimit]</Code> will be handled.
        </p>
        <p>
          Supported modes are <Code>ignore</Code>, <Code>overflow</Code>, and <Code>clip</Code>. See{' '}
          <ExternalLink
            href={`${getLink(
              'DOCS',
            )}/development/extensions-core/approximate-histograms.html#outlier-handling-modes`}
          >
            outlier handling modes
          </ExternalLink>{' '}
          for more details.
        </p>
      </>
    ),
  },
  // hyperUnique
  {
    name: 'isInputHyperUnique',
    type: 'boolean',
    defined: m => m.type === 'hyperUnique',
    defaultValue: false,
    info: (
      <>
        This can be set to true to index precomputed HLL (Base64 encoded output from druid-hll is
        expected).
      </>
    ),
  },
];

export function getMetricSpecName(metricSpec: MetricSpec): string {
  return (
    metricSpec.name || (metricSpec.aggregator ? getMetricSpecName(metricSpec.aggregator) : '?')
  );
}

export function getMetricSpecSingleFieldName(metricSpec: MetricSpec): string | undefined {
  return (
    metricSpec.fieldName ||
    (metricSpec.aggregator ? getMetricSpecSingleFieldName(metricSpec.aggregator) : undefined)
  );
}

export function getMetricSpecOutputType(metricSpec: MetricSpec): string | undefined {
  if (metricSpec.aggregator) return getMetricSpecOutputType(metricSpec.aggregator);
  const m = String(metricSpec.type).match(/^(long|float|double)/);
  if (!m) return;
  return m[1];
}

export function getMetricSpecs(
  headerAndRows: HeaderAndRows,
  typeHints: Record<string, string>,
): MetricSpec[] {
  return [{ name: 'count', type: 'count' }].concat(
    filterMap(headerAndRows.header, h => {
      if (h === '__time') return;
      const type = typeHints[h] || getColumnTypeFromHeaderAndRows(headerAndRows, h);
      switch (type) {
        case 'double':
          return { name: `sum_${h}`, type: 'doubleSum', fieldName: h };
        case 'float':
          return { name: `sum_${h}`, type: 'floatSum', fieldName: h };
        case 'long':
          return { name: `sum_${h}`, type: 'longSum', fieldName: h };
        default:
          return;
      }
    }),
  );
}
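As an aside (not part of the commit), a short sketch of how the helper functions above compose for a 'filtered' aggregator; the metric values are hypothetical:

// The helpers recurse into the inner aggregator of a 'filtered' metric.
const filtered: MetricSpec = {
  type: 'filtered',
  filter: { type: 'selector', dimension: 'channel', value: '#en.wikipedia' },
  aggregator: { name: 'sum_added', type: 'longSum', fieldName: 'added' },
};
getMetricSpecName(filtered); // => 'sum_added'
getMetricSpecSingleFieldName(filtered); // => 'added'
getMetricSpecOutputType(filtered); // => 'long' (from the /^(long|float|double)/ prefix)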
@@ -16,7 +16,7 @@
 * limitations under the License.
 */

import { timeFormatMatches } from './druid-time';
import { timeFormatMatches } from './time';

describe('timeFormatMatches', () => {
  it('works for auto', () => {

@@ -16,7 +16,7 @@
 * limitations under the License.
 */

import { jodaFormatToRegExp } from './joda-to-regexp';
import { jodaFormatToRegExp } from '../utils/joda-to-regexp';

export const NUMERIC_TIME_FORMATS: string[] = ['posix', 'millis', 'micro', 'nano'];
export const BASIC_TIME_FORMATS: string[] = ['auto', 'iso'].concat(NUMERIC_TIME_FORMATS);
@@ -0,0 +1,157 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import React from 'react';

import { ExternalLink, Field } from '../components';
import { deepGet, EMPTY_ARRAY, EMPTY_OBJECT } from '../utils';

import { IngestionSpec } from './ingestion-spec';
import {
  BASIC_TIME_FORMATS,
  DATE_ONLY_TIME_FORMATS,
  DATETIME_TIME_FORMATS,
  OTHER_TIME_FORMATS,
} from './time';
import { Transform } from './transform-spec';

const NO_SUCH_COLUMN = '!!!_no_such_column_!!!';

export const PLACEHOLDER_TIMESTAMP_SPEC: TimestampSpec = {
  column: NO_SUCH_COLUMN,
  missingValue: '1970-01-01T00:00:00Z',
};

export const CONSTANT_TIMESTAMP_SPEC: TimestampSpec = {
  column: NO_SUCH_COLUMN,
  missingValue: '2010-01-01T00:00:00Z',
};

export type TimestampSchema = 'none' | 'column' | 'expression';

export function getTimestampSchema(spec: IngestionSpec): TimestampSchema {
  const transforms: Transform[] =
    deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || EMPTY_ARRAY;

  const timeTransform = transforms.find(transform => transform.name === '__time');
  if (timeTransform) return 'expression';

  const timestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec') || EMPTY_OBJECT;
  return timestampSpec.column === NO_SUCH_COLUMN ? 'none' : 'column';
}

export interface TimestampSpec {
  column?: string;
  format?: string;
  missingValue?: string;
}

export function getTimestampSpecColumnFromSpec(spec: IngestionSpec): string {
  // For the default see: https://github.com/apache/druid/blob/master/core/src/main/java/org/apache/druid/data/input/impl/TimestampSpec.java#L44
  return deepGet(spec, 'spec.dataSchema.timestampSpec.column') || 'timestamp';
}

export function getTimestampSpecConstantFromSpec(spec: IngestionSpec): string | undefined {
  return deepGet(spec, 'spec.dataSchema.timestampSpec.missingValue');
}

export function getTimestampSpecExpressionFromSpec(spec: IngestionSpec): string | undefined {
  const transforms: Transform[] =
    deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || EMPTY_ARRAY;

  const timeTransform = transforms.find(transform => transform.name === '__time');
  if (!timeTransform) return;
  return timeTransform.expression;
}

export function getTimestampDetailFromSpec(spec: IngestionSpec): string {
  const timestampSchema = getTimestampSchema(spec);
  switch (timestampSchema) {
    case 'none':
      return `Constant: ${getTimestampSpecConstantFromSpec(spec)}`;

    case 'column':
      return `Column: ${getTimestampSpecColumnFromSpec(spec)}`;

    case 'expression':
      return `Expression: ${getTimestampSpecExpressionFromSpec(spec)}`;
  }

  return '-';
}

export const TIMESTAMP_SPEC_FIELDS: Field<TimestampSpec>[] = [
  {
    name: 'column',
    type: 'string',
    defaultValue: 'timestamp',
    required: true,
  },
  {
    name: 'format',
    type: 'string',
    defaultValue: 'auto',
    suggestions: [
      ...BASIC_TIME_FORMATS,
      {
        group: 'Date and time formats',
        suggestions: DATETIME_TIME_FORMATS,
      },
      {
        group: 'Date only formats',
        suggestions: DATE_ONLY_TIME_FORMATS,
      },
      {
        group: 'Other time formats',
        suggestions: OTHER_TIME_FORMATS,
      },
    ],
    info: (
      <p>
        Please specify your timestamp format by using the suggestions menu or typing in a{' '}
        <ExternalLink href="https://docs.oracle.com/javase/8/docs/api/java/time/format/DateTimeFormatter.html">
          format string
        </ExternalLink>
        .
      </p>
    ),
  },
  {
    name: 'missingValue',
    type: 'string',
    placeholder: '(optional)',
    info: <p>This value will be used if the specified column cannot be found.</p>,
  },
];

export const CONSTANT_TIMESTAMP_SPEC_FIELDS: Field<TimestampSpec>[] = [
  {
    name: 'missingValue',
    label: 'Placeholder value',
    type: 'string',
    info: <p>The placeholder value that will be used as the timestamp.</p>,
  },
];

export function issueWithTimestampSpec(
  timestampSpec: TimestampSpec | undefined,
): string | undefined {
  if (!timestampSpec) return 'no spec';
  if (!timestampSpec.column && !timestampSpec.missingValue) return 'timestamp spec is blank';
  return;
}
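As an aside (not part of the commit), a sketch of how getTimestampSchema classifies a spec; the spec objects and the deepSet import are hypothetical:

// A transform named '__time' always wins and yields 'expression'.
const withExpression = deepSet({} as IngestionSpec, 'spec.dataSchema.transformSpec.transforms', [
  { type: 'expression', name: '__time', expression: 'timestamp_parse("date")' },
]);
getTimestampSchema(withExpression); // => 'expression'

// Otherwise the NO_SUCH_COLUMN sentinel distinguishes 'none' from 'column'.
getTimestampSchema(
  deepSet({} as IngestionSpec, 'spec.dataSchema.timestampSpec', PLACEHOLDER_TIMESTAMP_SPEC),
); // => 'none'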
@@ -0,0 +1,104 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import { Code } from '@blueprintjs/core';
import React from 'react';

import { ExternalLink, Field } from '../components';
import { getLink } from '../links';

export interface TransformSpec {
  transforms?: Transform[];
  filter?: Record<string, any>;
}

export interface Transform {
  type: string;
  name: string;
  expression: string;
}

export const TRANSFORM_FIELDS: Field<Transform>[] = [
  {
    name: 'name',
    type: 'string',
    placeholder: 'output_name',
    required: true,
  },
  {
    name: 'type',
    type: 'string',
    suggestions: ['expression'],
    required: true,
  },
  {
    name: 'expression',
    type: 'string',
    placeholder: '"foo" + "bar"',
    required: true,
    info: (
      <>
        A valid Druid{' '}
        <ExternalLink href={`${getLink('DOCS')}/misc/math-expr.html`}>expression</ExternalLink>.
      </>
    ),
  },
];

export function getTimestampExpressionFields(transforms: Transform[]): Field<Transform[]>[] {
  const timeTransformIndex = transforms.findIndex(transform => transform.name === '__time');
  if (timeTransformIndex < 0) return [];

  return [
    {
      name: `${timeTransformIndex}.expression`,
      label: 'Expression',
      type: 'string',
      placeholder: `timestamp_parse(concat("date", ' ', "time"))`,
      required: true,
      suggestions: [
        `timestamp_parse(concat("date", ' ', "time"))`,
        `timestamp_parse(concat("date", ' ', "time"), 'M/d/yyyy H:mm:ss')`,
        `timestamp_parse(concat("year", '-', "month", '-', "day"))`,
      ],
      info: (
        <>
          A valid Druid{' '}
          <ExternalLink href={`${getLink('DOCS')}/misc/math-expr.html`}>expression</ExternalLink>{' '}
          that should output a millis timestamp. You most likely want to use the{' '}
          <Code>timestamp_parse</Code> function at the outer level.
        </>
      ),
    },
  ];
}

export function addTimestampTransform(transforms: Transform[]): Transform[] {
  return [
    {
      name: '__time',
      type: 'expression',
      expression: '',
    },
  ].concat(transforms);
}

export function removeTimestampTransform(transforms: Transform[]): Transform[] | undefined {
  const newTransforms = transforms.filter(transform => transform.name !== '__time');
  return newTransforms.length ? newTransforms : undefined;
}
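As an aside (not part of the commit), a quick sketch of the timestamp transform round trip; the sample transform is hypothetical:

const transforms: Transform[] = [
  { type: 'expression', name: 'fullName', expression: `concat("first", ' ', "last")` },
];

// addTimestampTransform prepends a blank '__time' expression transform...
const withTime = addTimestampTransform(transforms);

// ...and removeTimestampTransform strips it again, collapsing an empty list to undefined.
removeTimestampTransform(withTime); // => the original single-element list
removeTimestampTransform(addTimestampTransform([])); // => undefined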
@@ -16,13 +16,13 @@
 * limitations under the License.
 */

@import '../node_modules/normalize.css/normalize';
@import '~normalize.css/normalize';
@import '~fontsource-open-sans/index.css';
@import './blueprint-overrides';
@import '~@blueprintjs/core/src/blueprint';
@import '~@blueprintjs/datetime/src/blueprint-datetime';
@import '~react-splitter-layout/lib/index';
@import '../lib/react-table';
@import '../node_modules/react-splitter-layout/lib/index.css';

html,
body {

@@ -45,6 +45,10 @@ body {
    outline: none !important;
  }
}

.ace-solarized-dark {
  background-color: rgba($dark-gray1, 0.5);
}
}

.app-container {
@@ -19,7 +19,7 @@
import hasOwnProp from 'has-own-prop';

// This is set to the latest available version and should be updated to the next version before release
const DRUID_DOCS_VERSION = '0.19.0';
const DRUID_DOCS_VERSION = '0.20.0';

function fillVersion(str: string): string {
  return str.replace(/\{\{VERSION}}/g, DRUID_DOCS_VERSION);
@@ -18,7 +18,7 @@

import { sane } from 'druid-query-toolkit/build/test-utils';

import { DruidError } from './druid-query';
import { DruidError, getDruidErrorMessage, parseHtmlError, parseQueryPlan } from './druid-query';

describe('DruidQuery', () => {
  describe('DruidError.parsePosition', () => {

@@ -128,4 +128,18 @@ describe('DruidQuery', () => {
      expect(suggestion).toBeUndefined();
    });
  });

  describe('misc', () => {
    it('parseHtmlError', () => {
      expect(parseHtmlError('<div></div>')).toMatchInlineSnapshot(`undefined`);
    });

    it('getDruidErrorMessage', () => {
      expect(getDruidErrorMessage({})).toMatchInlineSnapshot(`undefined`);
    });

    it('parseQueryPlan', () => {
      expect(parseQueryPlan('start')).toMatchInlineSnapshot(`"start"`);
    });
  });
});
@ -1,115 +0,0 @@
|
|||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
import { filterMap } from './general';
|
||||
import { DimensionMode, DimensionSpec, IngestionSpec, MetricSpec } from './ingestion-spec';
|
||||
import { deepDelete, deepSet } from './object-change';
|
||||
import { HeaderAndRows } from './sampler';
|
||||
|
||||
export function guessTypeFromSample(sample: any[]): string {
|
||||
const definedValues = sample.filter(v => v != null);
|
||||
if (
|
||||
definedValues.length &&
|
||||
definedValues.every(v => !isNaN(v) && (typeof v === 'number' || typeof v === 'string'))
|
||||
) {
|
||||
if (definedValues.every(v => v % 1 === 0)) {
|
||||
return 'long';
|
||||
} else {
|
||||
return 'double';
|
||||
}
|
||||
} else {
|
||||
return 'string';
|
||||
}
|
||||
}
|
||||
|
||||
export function getColumnTypeFromHeaderAndRows(
|
||||
headerAndRows: HeaderAndRows,
|
||||
column: string,
|
||||
): string {
|
||||
return guessTypeFromSample(
|
||||
filterMap(headerAndRows.rows, (r: any) => (r.parsed ? r.parsed[column] : undefined)),
|
||||
);
|
||||
}
|
||||
|
||||
export function getDimensionSpecs(
|
||||
headerAndRows: HeaderAndRows,
|
||||
hasRollup: boolean,
|
||||
): (string | DimensionSpec)[] {
|
||||
return filterMap(headerAndRows.header, h => {
|
||||
if (h === '__time') return;
|
||||
const guessedType = getColumnTypeFromHeaderAndRows(headerAndRows, h);
|
||||
if (guessedType === 'string') return h;
|
||||
if (hasRollup) return;
|
||||
return {
|
||||
type: guessedType,
|
||||
name: h,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
export function getMetricSpecs(headerAndRows: HeaderAndRows): MetricSpec[] {
|
||||
return [{ name: 'count', type: 'count' }].concat(
|
||||
filterMap(headerAndRows.header, h => {
|
||||
if (h === '__time') return;
|
||||
const guessedType = getColumnTypeFromHeaderAndRows(headerAndRows, h);
|
||||
switch (guessedType) {
|
||||
case 'double':
|
||||
return { name: `sum_${h}`, type: 'doubleSum', fieldName: h };
|
||||
case 'long':
|
||||
return { name: `sum_${h}`, type: 'longSum', fieldName: h };
|
||||
default:
|
||||
return;
|
||||
}
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
export function updateSchemaWithSample(
|
||||
spec: IngestionSpec,
|
||||
headerAndRows: HeaderAndRows,
|
||||
dimensionMode: DimensionMode,
|
||||
rollup: boolean,
|
||||
): IngestionSpec {
|
||||
let newSpec = spec;
|
||||
|
||||
if (dimensionMode === 'auto-detect') {
|
||||
newSpec = deepSet(newSpec, 'spec.dataSchema.dimensionsSpec.dimensions', []);
|
||||
} else {
|
||||
newSpec = deepDelete(newSpec, 'spec.dataSchema.dimensionsSpec.dimensionExclusions');
|
||||
|
||||
const dimensions = getDimensionSpecs(headerAndRows, rollup);
|
||||
if (dimensions) {
|
||||
newSpec = deepSet(newSpec, 'spec.dataSchema.dimensionsSpec.dimensions', dimensions);
|
||||
}
|
||||
}
|
||||
|
||||
if (rollup) {
|
||||
newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.queryGranularity', 'HOUR');
|
||||
|
||||
const metrics = getMetricSpecs(headerAndRows);
|
||||
if (metrics) {
|
||||
newSpec = deepSet(newSpec, 'spec.dataSchema.metricsSpec', metrics);
|
||||
}
|
||||
} else {
|
||||
newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.queryGranularity', 'NONE');
|
||||
newSpec = deepDelete(newSpec, 'spec.dataSchema.metricsSpec');
|
||||
}
|
||||
|
||||
newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.rollup', rollup);
|
||||
return newSpec;
|
||||
}
|
|
@@ -55,20 +55,26 @@ describe('general', () => {
  });

  describe('sqlQueryCustomTableFilter', () => {
    it('works', () => {
    it('works with contains', () => {
      expect(
        sqlQueryCustomTableFilter({
          id: 'datasource',
          value: `hello`,
        }),
      ).toMatchInlineSnapshot(`"LOWER(\\"datasource\\") LIKE LOWER('%hello%')"`);
        String(
          sqlQueryCustomTableFilter({
            id: 'datasource',
            value: `Hello`,
          }),
        ),
      ).toEqual(`LOWER("datasource") LIKE '%hello%'`);
    });

    it('works with exact', () => {
      expect(
        sqlQueryCustomTableFilter({
          id: 'datasource',
          value: `"hello"`,
        }),
      ).toMatchInlineSnapshot(`"\\"datasource\\" = 'hello'"`);
        String(
          sqlQueryCustomTableFilter({
            id: 'datasource',
            value: `"hello"`,
          }),
        ),
      ).toEqual(`"datasource" = 'hello'`);
    });
  });
@@ -19,6 +19,7 @@
import { Button, HTMLSelect, InputGroup, Intent } from '@blueprintjs/core';
import { IconNames } from '@blueprintjs/icons';
import copy from 'copy-to-clipboard';
import { SqlExpression, SqlFunction, SqlLiteral, SqlRef } from 'druid-query-toolkit';
import FileSaver from 'file-saver';
import hasOwnProp from 'has-own-prop';
import numeral from 'numeral';

@@ -27,6 +28,10 @@ import { Filter, FilterRender } from 'react-table';

import { AppToaster } from '../singletons/toaster';

// These constants are used to make sure that they are not constantly recreated, thrashing the pure components
export const EMPTY_OBJECT: any = {};
export const EMPTY_ARRAY: any[] = [];

export function wait(ms: number): Promise<void> {
  return new Promise(resolve => {
    setTimeout(resolve, ms);

@@ -117,14 +122,15 @@ export function booleanCustomTableFilter(filter: Filter, value: any): boolean {
  return haystack.includes(needle);
}

export function sqlQueryCustomTableFilter(filter: Filter): string {
  const columnName = JSON.stringify(filter.id);
export function sqlQueryCustomTableFilter(filter: Filter): SqlExpression {
  const needleAndMode: NeedleAndMode = getNeedleAndMode(filter.value);
  const needle = needleAndMode.needle;
  if (needleAndMode.mode === 'exact') {
    return `${columnName} = '${needle}'`;
    return SqlRef.columnWithQuotes(filter.id).equal(SqlLiteral.create(needle));
  } else {
    return `LOWER(${columnName}) LIKE LOWER('%${needle}%')`;
    return SqlFunction.simple('LOWER', [SqlRef.columnWithQuotes(filter.id)]).like(
      SqlLiteral.create(`%${needle.toLowerCase()}%`),
    );
  }
}

@@ -135,6 +141,10 @@ export function caseInsensitiveContains(testString: string, searchString: string
  return testString.toLowerCase().includes(searchString.toLowerCase());
}

export function oneOf<T>(thing: T, ...options: T[]): boolean {
  return options.includes(thing);
}

// ----------------------------

export function countBy<T>(
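As an aside (not part of the commit), two small usage sketches for the helpers above; the column and values are hypothetical:

// oneOf replaces chains like x === 'csv' || x === 'tsv'.
oneOf('tsv', 'csv', 'tsv'); // => true

// The filter now builds a SqlExpression instead of concatenating strings.
String(sqlQueryCustomTableFilter({ id: 'datasource', value: 'wiki' }));
// => LOWER("datasource") LIKE '%wiki%'
String(sqlQueryCustomTableFilter({ id: 'datasource', value: '"wikipedia"' }));
// => "datasource" = 'wikipedia'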
@@ -24,4 +24,5 @@ export * from './query-manager';
export * from './query-cursor';
export * from './local-storage-keys';
export * from './column-metadata';
export * from './compaction';
export * from './object-change';
export * from './capabilities';
@@ -83,6 +83,17 @@ export function deepSet<T extends Record<string, any>>(value: T, path: string, x
   return valueCopy;
 }
 
+export function deepSetMulti<T extends Record<string, any>>(
+  value: T,
+  changes: Record<string, any>,
+): T {
+  let newValue = value;
+  for (const k in changes) {
+    newValue = deepSet(newValue, k, changes[k]);
+  }
+  return newValue;
+}
+
 export function deepDelete<T extends Record<string, any>>(value: T, path: string): T {
   const valueCopy = shallowCopy(value);
   const parts = parsePath(path);

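Editor's note: a small sketch of the new deepSetMulti helper in use (hypothetical object, not part of the commit; import path assumed). Each key is a deep path applied through deepSet in a single pass:

import { deepSetMulti } from './object-change';

const spec = { spec: { dataSchema: { timestampSpec: { column: 't', format: 'auto' } } } };

const updated = deepSetMulti(spec, {
  'spec.dataSchema.timestampSpec': { column: 'timestamp', format: 'iso' },
  'spec.dataSchema.transformSpec.transforms': [],
});
// `updated` is a new object with both paths set; `spec` itself is left untouched,
// matching deepSet's copy-on-write behavior.
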
@@ -165,5 +165,8 @@ export class QueryManager<Q, R> {
 
   public terminate(): void {
     this.terminated = true;
+    if (this.currentRunCancelFn) {
+      this.currentRunCancelFn();
+    }
   }
 }

@@ -18,24 +18,31 @@
 
 import axios from 'axios';
 
-import { getDruidErrorMessage, queryDruidRune } from './druid-query';
-import { alphanumericCompare, filterMap, sortWithPrefixSuffix } from './general';
 import {
   DimensionsSpec,
-  getDummyTimestampSpec,
   getSpecType,
+  getTimestampSchema,
   IngestionSpec,
   IngestionType,
   InputFormat,
   IoConfig,
-  isColumnTimestampSpec,
   isDruidSource,
   MetricSpec,
+  PLACEHOLDER_TIMESTAMP_SPEC,
   TimestampSpec,
   Transform,
   TransformSpec,
   upgradeSpec,
-} from './ingestion-spec';
+} from '../druid-models';
+
+import { getDruidErrorMessage, queryDruidRune } from './druid-query';
+import {
+  alphanumericCompare,
+  EMPTY_ARRAY,
+  filterMap,
+  oneOf,
+  sortWithPrefixSuffix,
+} from './general';
 import { deepGet, deepSet } from './object-change';
 
 const SAMPLER_URL = `/druid/indexer/v1/sampler`;
@@ -231,7 +238,8 @@ function cleanupQueryGranularity(queryGranularity: any): any {
   if (typeof queryGranularityType !== 'string') return queryGranularity;
   queryGranularityType = queryGranularityType.toUpperCase();
 
-  const knownGranularity = [
+  const knownGranularity = oneOf(
+    queryGranularityType,
     'NONE',
     'SECOND',
     'MINUTE',
@@ -240,7 +248,7 @@ function cleanupQueryGranularity(queryGranularity: any): any {
     'WEEK',
     'MONTH',
     'YEAR',
-  ].includes(queryGranularityType);
+  );
 
   return knownGranularity ? queryGranularityType : queryGranularity;
 }
@@ -272,7 +280,7 @@ export async function sampleForConnect(
       ioConfig,
       dataSchema: {
         dataSource: 'sample',
-        timestampSpec: getDummyTimestampSpec(),
+        timestampSpec: PLACEHOLDER_TIMESTAMP_SPEC,
         dimensionsSpec: {},
       },
     } as any,
@@ -326,7 +334,7 @@ export async function sampleForParser(
       ioConfig,
       dataSchema: {
         dataSource: 'sample',
-        timestampSpec: getDummyTimestampSpec(),
+        timestampSpec: PLACEHOLDER_TIMESTAMP_SPEC,
         dimensionsSpec: {},
       },
     },
@@ -342,7 +350,7 @@ export async function sampleForTimestamp(
 ): Promise<SampleResponse> {
   const samplerType = getSpecType(spec);
   const timestampSpec: TimestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec');
-  const columnTimestampSpec = isColumnTimestampSpec(timestampSpec);
+  const timestampSchema = getTimestampSchema(spec);
 
   // First do a query with a static timestamp spec
   const sampleSpecColumns: SampleSpec = {
@@ -352,7 +360,7 @@ export async function sampleForTimestamp(
       dataSchema: {
         dataSource: 'sample',
         dimensionsSpec: {},
-        timestampSpec: columnTimestampSpec ? getDummyTimestampSpec() : timestampSpec,
+        timestampSpec: timestampSchema === 'column' ? PLACEHOLDER_TIMESTAMP_SPEC : timestampSpec,
       },
     },
     samplerConfig: BASE_SAMPLER_CONFIG,
@@ -364,7 +372,10 @@ export async function sampleForTimestamp(
   );
 
   // If we are not parsing a column then there is nothing left to do
-  if (!columnTimestampSpec) return sampleColumns;
+  if (timestampSchema === 'none') return sampleColumns;
 
+  const transforms: Transform[] =
+    deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || EMPTY_ARRAY;
+
   // If we are trying to parse a column then get a bit fancy:
   // Query the same sample again (same cache key)
@@ -376,6 +387,9 @@ export async function sampleForTimestamp(
       dataSource: 'sample',
       dimensionsSpec: {},
       timestampSpec,
+      transformSpec: {
+        transforms: transforms.filter(transform => transform.name === '__time'),
+      },
     },
   },
   samplerConfig: BASE_SAMPLER_CONFIG,

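Editor's note: the oneOf refactor in cleanupQueryGranularity above is behavior-preserving; an illustrative sketch (hypothetical inputs, and note the function itself is module-private, not exported):

// Before: ['NONE', 'SECOND', ..., 'YEAR'].includes(queryGranularityType)
// After:  oneOf(queryGranularityType, 'NONE', 'SECOND', ..., 'YEAR')
cleanupQueryGranularity('hour');  // 'HOUR'  - known granularity, normalized to upper case
cleanupQueryGranularity('fancy'); // 'fancy' - unknown, the input is returned untouched
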
@@ -16,18 +16,11 @@
  * limitations under the License.
  */
 
-import { getDruidErrorMessage, parseHtmlError, parseQueryPlan } from './druid-query';
-import {
-  getColumnTypeFromHeaderAndRows,
-  getDimensionSpecs,
-  getMetricSpecs,
-  guessTypeFromSample,
-  updateSchemaWithSample,
-} from './druid-type';
-import { IngestionSpec } from './ingestion-spec';
+import { IngestionSpec } from '../druid-models';
+
 import { applyCache, headerFromSampleResponse } from './sampler';
 
-describe('test-utils', () => {
+describe('utils', () => {
   const ingestionSpec: IngestionSpec = {
     type: 'index_parallel',
     spec: {
@@ -123,161 +116,4 @@ describe('test-utils', () => {
       }
     `);
   });
-
-  // it('spec-utils sampleForParser', async () => {
-  //   expect(await sampleForParser(ingestionSpec, 'start', 'abc123')).toMatchInlineSnapshot(
-  //     `Promise {}`,
-  //   );
-  // });
-  //
-  // it('spec-utils SampleSpec', async () => {
-  //   expect(await sampleForConnect(ingestionSpec, 'start')).toMatchInlineSnapshot(`Promise {}`);
-  // });
-  //
-  // it('spec-utils sampleForTimestamp', async () => {
-  //   expect(await sampleForTimestamp(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
-  // });
-  //
-  // it('spec-utils sampleForTransform', async () => {
-  //   expect(await sampleForTransform(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
-  // });
-  //
-  // it('spec-utils sampleForFilter', async () => {
-  //   expect(await sampleForFilter(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
-  // });
-  //
-  // it('spec-utils sampleForSchema', async () => {
-  //   expect(await sampleForSchema(ingestionSpec, 'start', cacheRows)).toMatchInlineSnapshot();
-  // });
-  //
-  // it('spec-utils sampleForExampleManifests', async () => {
-  //   expect(await sampleForExampleManifests('some url')).toMatchInlineSnapshot();
-  // });
 });
-
-describe('druid-type.ts', () => {
-  const ingestionSpec: IngestionSpec = {
-    type: 'index_parallel',
-    spec: {
-      ioConfig: {
-        type: 'index_parallel',
-        inputSource: {
-          type: 'http',
-          uris: ['https://static.imply.io/data/wikipedia.json.gz'],
-        },
-        inputFormat: {
-          type: 'json',
-        },
-      },
-      tuningConfig: {
-        type: 'index_parallel',
-      },
-      dataSchema: {
-        dataSource: 'wikipedia',
-        granularitySpec: {
-          type: 'uniform',
-          segmentGranularity: 'DAY',
-          queryGranularity: 'HOUR',
-        },
-        timestampSpec: {
-          column: 'timestamp',
-          format: 'iso',
-        },
-        dimensionsSpec: {},
-      },
-    },
-  };
-
-  it('spec-utils guessTypeFromSample', () => {
-    expect(guessTypeFromSample([])).toMatchInlineSnapshot(`"string"`);
-  });
-
-  it('spec-utils getColumnTypeFromHeaderAndRows', () => {
-    expect(
-      getColumnTypeFromHeaderAndRows({ header: ['header'], rows: [] }, 'header'),
-    ).toMatchInlineSnapshot(`"string"`);
-  });
-
-  it('spec-utils getDimensionSpecs', () => {
-    expect(getDimensionSpecs({ header: ['header'], rows: [] }, true)).toMatchInlineSnapshot(`
-      Array [
-        "header",
-      ]
-    `);
-  });
-
-  it('spec-utils getMetricSecs', () => {
-    expect(getMetricSpecs({ header: ['header'], rows: [] })).toMatchInlineSnapshot(`
-      Array [
-        Object {
-          "name": "count",
-          "type": "count",
-        },
-      ]
-    `);
-  });
-
-  it('spec-utils updateSchemaWithSample', () => {
-    expect(
-      updateSchemaWithSample(ingestionSpec, { header: ['header'], rows: [] }, 'specific', true),
-    ).toMatchInlineSnapshot(`
-      Object {
-        "spec": Object {
-          "dataSchema": Object {
-            "dataSource": "wikipedia",
-            "dimensionsSpec": Object {
-              "dimensions": Array [
-                "header",
-              ],
-            },
-            "granularitySpec": Object {
-              "queryGranularity": "HOUR",
-              "rollup": true,
-              "segmentGranularity": "DAY",
-              "type": "uniform",
-            },
-            "metricsSpec": Array [
-              Object {
-                "name": "count",
-                "type": "count",
-              },
-            ],
-            "timestampSpec": Object {
-              "column": "timestamp",
-              "format": "iso",
-            },
-          },
-          "ioConfig": Object {
-            "inputFormat": Object {
-              "type": "json",
-            },
-            "inputSource": Object {
-              "type": "http",
-              "uris": Array [
-                "https://static.imply.io/data/wikipedia.json.gz",
-              ],
-            },
-            "type": "index_parallel",
-          },
-          "tuningConfig": Object {
-            "type": "index_parallel",
-          },
-        },
-        "type": "index_parallel",
-      }
-    `);
-  });
-});
-describe('druid-query.ts', () => {
-  it('spec-utils parseHtmlError', () => {
-    expect(parseHtmlError('<div></div>')).toMatchInlineSnapshot(`undefined`);
-  });
-
-  it('spec-utils parseHtmlError', () => {
-    expect(getDruidErrorMessage({})).toMatchInlineSnapshot(`undefined`);
-  });
-
-  it('spec-utils parseQueryPlan', () => {
-    expect(parseQueryPlan('start')).toMatchInlineSnapshot(`"start"`);
-  });
-});

@@ -64,6 +64,7 @@ exports[`data source view matches snapshot 1`] = `
   "Segment load/drop queues",
   "Total data size",
   "Segment size",
+  "Segment granularity",
   "Total rows",
   "Avg. row size",
   "Replicated size",
@@ -193,6 +194,19 @@ exports[`data source view matches snapshot 1`] = `
     "show": true,
     "width": 220,
   },
+  Object {
+    "Cell": [Function],
+    "Header": <React.Fragment>
+      Segment
+      <br />
+      granularity
+    </React.Fragment>,
+    "accessor": [Function],
+    "filterable": false,
+    "id": "segment_granularity",
+    "show": true,
+    "width": 100,
+  },
   Object {
     "Cell": [Function],
     "Header": <React.Fragment>

@@ -39,14 +39,20 @@ import {
 } from '../../components';
 import { AsyncActionDialog, CompactionDialog, RetentionDialog } from '../../dialogs';
 import { DatasourceTableActionDialog } from '../../dialogs/datasource-table-action-dialog/datasource-table-action-dialog';
+import {
+  CompactionConfig,
+  CompactionStatus,
+  formatCompactionConfigAndStatus,
+  zeroCompactionStatus,
+} from '../../druid-models';
 import { AppToaster } from '../../singletons/toaster';
 import {
   addFilter,
-  CompactionConfig,
-  CompactionStatus,
+  Capabilities,
+  CapabilitiesMode,
   countBy,
+  deepGet,
   formatBytes,
-  formatCompactionConfigAndStatus,
   formatInteger,
   formatMillions,
   formatPercent,
@@ -57,13 +63,10 @@ import {
   queryDruidSql,
   QueryManager,
   QueryState,
-  zeroCompactionStatus,
 } from '../../utils';
 import { BasicAction } from '../../utils/basic-action';
-import { Capabilities, CapabilitiesMode } from '../../utils/capabilities';
 import { Rule, RuleUtil } from '../../utils/load-rule';
 import { LocalStorageBackedArray } from '../../utils/local-storage-backed-array';
-import { deepGet } from '../../utils/object-change';
 
 import './datasource-view.scss';
 
@@ -74,6 +77,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
   'Segment load/drop queues',
   'Total data size',
   'Segment size',
+  'Segment granularity',
   'Total rows',
   'Avg. row size',
   'Replicated size',
@@ -100,6 +104,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
   'Segment load/drop queues',
   'Total data size',
   'Segment size',
+  'Segment granularity',
   'Total rows',
   'Avg. row size',
   'Replicated size',
@@ -149,6 +154,11 @@ interface DatasourceQueryResultRow {
   readonly num_available_segments: number;
   readonly num_segments_to_load: number;
   readonly num_segments_to_drop: number;
+  readonly minute_aligned_segments: number;
+  readonly hour_aligned_segments: number;
+  readonly day_aligned_segments: number;
+  readonly month_aligned_segments: number;
+  readonly year_aligned_segments: number;
   readonly total_data_size: number;
   readonly replicated_size: number;
   readonly min_segment_rows: number;
@@ -158,6 +168,17 @@
   readonly avg_row_size: number;
 }
 
+function segmentGranularityCountsToRank(row: DatasourceQueryResultRow): number {
+  return (
+    Number(Boolean(row.num_segments)) +
+    Number(Boolean(row.minute_aligned_segments)) +
+    Number(Boolean(row.hour_aligned_segments)) +
+    Number(Boolean(row.day_aligned_segments)) +
+    Number(Boolean(row.month_aligned_segments)) +
+    Number(Boolean(row.year_aligned_segments))
+  );
+}
+
 interface Datasource extends DatasourceQueryResultRow {
   readonly rules: Rule[];
   readonly compactionConfig?: CompactionConfig;
@@ -227,6 +248,11 @@ export class DatasourcesView extends React.PureComponent<
   COUNT(*) FILTER (WHERE is_available = 1 AND ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1)) AS num_available_segments,
   COUNT(*) FILTER (WHERE is_published = 1 AND is_overshadowed = 0 AND is_available = 0) AS num_segments_to_load,
   COUNT(*) FILTER (WHERE is_available = 1 AND NOT ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1)) AS num_segments_to_drop,
+  COUNT(*) FILTER (WHERE ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AND "start" LIKE '%:00.000Z' AND "end" LIKE '%:00.000Z') AS minute_aligned_segments,
+  COUNT(*) FILTER (WHERE ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AND "start" LIKE '%:00:00.000Z' AND "end" LIKE '%:00:00.000Z') AS hour_aligned_segments,
+  COUNT(*) FILTER (WHERE ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AND "start" LIKE '%T00:00:00.000Z' AND "end" LIKE '%T00:00:00.000Z') AS day_aligned_segments,
+  COUNT(*) FILTER (WHERE ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AND "start" LIKE '%-01T00:00:00.000Z' AND "end" LIKE '%-01T00:00:00.000Z') AS month_aligned_segments,
+  COUNT(*) FILTER (WHERE ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AND "start" LIKE '%-01-01T00:00:00.000Z' AND "end" LIKE '%-01-01T00:00:00.000Z') AS year_aligned_segments,
   SUM("size") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS total_data_size,
   SUM("size" * "num_replicas") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS replicated_size,
   MIN("num_rows") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS min_segment_rows,
@@ -306,6 +332,11 @@ GROUP BY 1`;
         num_segments: numSegments,
         num_segments_to_load: segmentsToLoad,
         num_segments_to_drop: 0,
+        minute_aligned_segments: -1,
+        hour_aligned_segments: -1,
+        day_aligned_segments: -1,
+        month_aligned_segments: -1,
+        year_aligned_segments: -1,
         replicated_size: -1,
         total_data_size: totalDataSize,
         min_segment_rows: -1,
@@ -1031,6 +1062,37 @@ GROUP BY 1`;
           </>
         ),
       },
+      {
+        Header: twoLines('Segment', 'granularity'),
+        show: capabilities.hasSql() && hiddenColumns.exists('Segment granularity'),
+        id: 'segment_granularity',
+        accessor: segmentGranularityCountsToRank,
+        filterable: false,
+        width: 100,
+        Cell: ({ original }) => {
+          const segmentGranularities: string[] = [];
+          if (!original.num_segments) return '-';
+          if (original.num_segments - original.minute_aligned_segments) {
+            segmentGranularities.push('Sub minute');
+          }
+          if (original.minute_aligned_segments - original.hour_aligned_segments) {
+            segmentGranularities.push('Minute');
+          }
+          if (original.hour_aligned_segments - original.day_aligned_segments) {
+            segmentGranularities.push('Hour');
+          }
+          if (original.day_aligned_segments - original.month_aligned_segments) {
+            segmentGranularities.push('Day');
+          }
+          if (original.month_aligned_segments - original.year_aligned_segments) {
+            segmentGranularities.push('Month');
+          }
+          if (original.year_aligned_segments) {
+            segmentGranularities.push('Year');
+          }
+          return segmentGranularities.join(', ');
+        },
+      },
      {
        Header: twoLines('Total', 'rows'),
        show: capabilities.hasSql() && hiddenColumns.exists('Total rows'),

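Editor's note: the segment-granularity cell above works by subtracting successive alignment counts; a worked example with hypothetical row values (not part of the commit):

// 10 segments total, all minute- and hour-aligned, 2 of them also day-aligned.
const row = {
  num_segments: 10,
  minute_aligned_segments: 10, // every hour-aligned segment is also minute-aligned
  hour_aligned_segments: 10,
  day_aligned_segments: 2,
  month_aligned_segments: 0,
  year_aligned_segments: 0,
};
// num - minute  = 0 -> no 'Sub minute'
// minute - hour = 0 -> no 'Minute'
// hour - day    = 8 -> 'Hour'
// day - month   = 2 -> 'Day'
// The cell renders: 'Hour, Day'
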
@@ -22,9 +22,7 @@ import { sum } from 'd3-array';
 import React from 'react';
 
 import { useQueryManager } from '../../../hooks';
-import { pluralIfNeeded, queryDruidSql } from '../../../utils';
-import { Capabilities } from '../../../utils/capabilities';
-import { deepGet } from '../../../utils/object-change';
+import { Capabilities, deepGet, pluralIfNeeded, queryDruidSql } from '../../../utils';
 import { HomeViewCard } from '../home-view-card/home-view-card';
 
 export interface SegmentCounts {

@@ -32,7 +32,6 @@ describe('tasks view', () => {
       datasourceId={'datasource'}
       goToDatasource={() => {}}
-      goToQuery={() => {}}
       goToMiddleManager={() => {}}
       goToLoadData={() => {}}
       capabilities={Capabilities.FULL}
     />,

@@ -45,11 +45,13 @@ import {
   addFilter,
   addFilterRaw,
   booleanCustomTableFilter,
+  deepGet,
   formatDuration,
   getDruidErrorMessage,
   localStorageGet,
   LocalStorageKeys,
   localStorageSet,
+  oneOf,
   queryDruidSql,
   QueryManager,
   QueryState,
@@ -57,7 +59,6 @@ import {
 import { BasicAction } from '../../utils/basic-action';
 import { Capabilities } from '../../utils/capabilities';
 import { LocalStorageBackedArray } from '../../utils/local-storage-backed-array';
-import { deepGet } from '../../utils/object-change';
 
 import './ingestion-view.scss';
 
@@ -108,7 +109,6 @@ export interface IngestionViewProps {
   openDialog: string | undefined;
   goToDatasource: (datasource: string) => void;
-  goToQuery: (initSql: string) => void;
   goToMiddleManager: (middleManager: string) => void;
   goToLoadData: (supervisorId?: string, taskId?: string) => void;
   capabilities: Capabilities;
 }
@@ -385,7 +385,7 @@ ORDER BY "rank" DESC, "created_time" DESC`;
     const { goToDatasource, goToLoadData } = this.props;
 
     const actions: BasicAction[] = [];
-    if (type === 'kafka' || type === 'kinesis') {
+    if (oneOf(type, 'kafka', 'kinesis')) {
       actions.push(
         {
           icon: IconNames.MULTI_SELECT,
@@ -659,14 +659,14 @@ ORDER BY "rank" DESC, "created_time" DESC`;
         onAction: () => goToDatasource(datasource),
       });
     }
-    if (type === 'index' || type === 'index_parallel') {
+    if (oneOf(type, 'index', 'index_parallel')) {
       actions.push({
         icon: IconNames.CLOUD_UPLOAD,
         title: 'Open in data loader',
         onAction: () => goToLoadData(undefined, id),
       });
     }
-    if (status === 'RUNNING' || status === 'WAITING' || status === 'PENDING') {
+    if (oneOf(status, 'RUNNING', 'WAITING', 'PENDING')) {
       actions.push({
         icon: IconNames.CROSS,
         title: 'Kill',
@@ -704,7 +704,6 @@ ORDER BY "rank" DESC, "created_time" DESC`;
   }
 
   renderTaskTable() {
-    const { goToMiddleManager } = this.props;
     const {
       tasksState,
       taskFilter,
@@ -812,21 +811,12 @@ ORDER BY "rank" DESC, "created_time" DESC`;
       }),
       Cell: row => {
         if (row.aggregated) return '';
-        const { status, location } = row.original;
-        const locationHostname = location ? location.split(':')[0] : null;
+        const { status } = row.original;
         const errorMsg = row.original.error_msg;
         return (
           <span>
             <span style={{ color: statusToColor(status) }}>&#9679; </span>
             {status}
-            {location && (
-              <a
-                onClick={() => goToMiddleManager(locationHostname)}
-                title={`Go to: ${locationHostname}`}
-              >
-                &#10138;
-              </a>
-            )}
             {errorMsg && (
               <a
                 onClick={() => this.setState({ alertErrorMsg: errorMsg })}

@@ -21,8 +21,8 @@ import React from 'react';
 import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
+import { DruidFilter } from '../../../druid-models';
 import { caseInsensitiveContains, filterMap } from '../../../utils';
-import { DruidFilter } from '../../../utils/ingestion-spec';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
 import './filter-table.scss';

@@ -19,6 +19,12 @@
 @import '~@blueprintjs/core/src/common/colors';
 @import '../../variables';
 
+$control-bar-width: 300px;
+
+$icon-width: 100px;
+$actual-icon-width: 520px;
+$actual-icon-height: 400px;
+
 @mixin sunk-panel {
   background: rgba($dark-gray1, 0.5);
   border-radius: $pt-border-radius;
@@ -30,7 +36,7 @@
   height: 100%;
   display: grid;
   grid-gap: $thin-padding 5px;
-  grid-template-columns: 1fr 280px;
+  grid-template-columns: 1fr $control-bar-width;
   grid-template-rows: 60px 1fr 28px;
   grid-template-areas:
     'navi navi'
@@ -133,7 +139,8 @@
   }
 
   img {
-    width: 100px;
+    width: $icon-width;
+    height: $icon-width * ($actual-icon-height / $actual-icon-width);
     display: inline-block;
   }
 }
@@ -144,7 +151,7 @@
   &.tuning,
   &.publish {
     grid-gap: 20px 40px;
-    grid-template-columns: 1fr 1fr 280px;
+    grid-template-columns: 1fr 1fr $control-bar-width;
     grid-template-areas:
       'navi navi navi'
       'main othr ctrl'

@@ -54,53 +54,46 @@ import {
 } from '../../components';
 import { FormGroupWithInfo } from '../../components/form-group-with-info/form-group-with-info';
 import { AsyncActionDialog } from '../../dialogs';
-import { getLink } from '../../links';
-import { AppToaster } from '../../singletons/toaster';
-import { UrlBaser } from '../../singletons/url-baser';
 import {
-  filterMap,
-  getDruidErrorMessage,
-  localStorageGet,
-  LocalStorageKeys,
-  localStorageSet,
-  parseJson,
-  pluralIfNeeded,
-  QueryState,
-} from '../../utils';
-import { NUMERIC_TIME_FORMATS, possibleDruidFormatForValues } from '../../utils/druid-time';
-import { updateSchemaWithSample } from '../../utils/druid-type';
+  addTimestampTransform,
+  CONSTANT_TIMESTAMP_SPEC,
+  CONSTANT_TIMESTAMP_SPEC_FIELDS,
+  DIMENSION_SPEC_FIELDS,
+  FILTER_FIELDS,
+  FLATTEN_FIELD_FIELDS,
+  getTimestampExpressionFields,
+  getTimestampSchema,
+  INPUT_FORMAT_FIELDS,
+  METRIC_SPEC_FIELDS,
+  removeTimestampTransform,
+  TIMESTAMP_SPEC_FIELDS,
+  TimestampSpec,
+  Transform,
+  TRANSFORM_FIELDS,
+  updateSchemaWithSample,
+} from '../../druid-models';
 import {
   adjustIngestionSpec,
   adjustTuningConfig,
   cleanSpec,
+  computeFlattenPathsForData,
   DimensionMode,
   DimensionSpec,
   DimensionsSpec,
   DruidFilter,
-  EMPTY_ARRAY,
-  EMPTY_OBJECT,
   fillDataSourceNameIfNeeded,
   fillInputFormat,
   FlattenField,
-  getConstantTimestampSpec,
   getDimensionMode,
-  getDimensionSpecFormFields,
-  getFilterFormFields,
-  getFlattenFieldFormFields,
   getIngestionComboType,
   getIngestionDocLink,
   getIngestionImage,
   getIngestionTitle,
-  getInputFormatFormFields,
   getIoConfigFormFields,
   getIoConfigTuningFormFields,
-  getMetricSpecFormFields,
   getPartitionRelatedTuningSpecFormFields,
   getRequiredModule,
   getRollup,
   getSpecType,
-  getTimestampSpecFormFields,
-  getTransformFormFields,
   getTuningSpecFormFields,
   GranularitySpec,
   IngestionComboTypeWithExtra,
@@ -110,7 +103,6 @@ import {
   invalidIoConfig,
   invalidTuningConfig,
   IoConfig,
-  isColumnTimestampSpec,
   isDruidSource,
   isEmptyIngestionSpec,
   issueWithIoConfig,
@@ -119,14 +111,33 @@ import {
   MAX_INLINE_DATA_LENGTH,
   MetricSpec,
   normalizeSpec,
+  NUMERIC_TIME_FORMATS,
+  possibleDruidFormatForValues,
   splitFilter,
-  TimestampSpec,
-  Transform,
   TuningConfig,
   updateIngestionType,
   upgradeSpec,
-} from '../../utils/ingestion-spec';
-import { deepDelete, deepGet, deepSet } from '../../utils/object-change';
+} from '../../druid-models';
+import { getLink } from '../../links';
+import { AppToaster } from '../../singletons/toaster';
+import { UrlBaser } from '../../singletons/url-baser';
+import {
+  deepDelete,
+  deepGet,
+  deepSet,
+  deepSetMulti,
+  EMPTY_ARRAY,
+  EMPTY_OBJECT,
+  filterMap,
+  getDruidErrorMessage,
+  localStorageGet,
+  LocalStorageKeys,
+  localStorageSet,
+  oneOf,
+  parseJson,
+  pluralIfNeeded,
+  QueryState,
+} from '../../utils';
 import {
   CacheRows,
   ExampleManifest,
@@ -146,7 +157,6 @@ import {
   SampleResponseWithExtraInfo,
   SampleStrategy,
 } from '../../utils/sampler';
-import { computeFlattenPathsForData } from '../../utils/spec-utils';
 
 import { ExamplePicker } from './example-picker/example-picker';
 import { FilterTable, filterTableSelectedColumnName } from './filter-table/filter-table';
@@ -187,7 +197,7 @@ function showBlankLine(line: SampleEntry): string {
 }
 
 function getTimestampSpec(headerAndRows: HeaderAndRows | null): TimestampSpec {
-  if (!headerAndRows) return getConstantTimestampSpec();
+  if (!headerAndRows) return CONSTANT_TIMESTAMP_SPEC;
 
   const timestampSpecs = filterMap(headerAndRows.header, sampleHeader => {
     const possibleFormat = possibleDruidFormatForValues(
@@ -204,7 +214,7 @@ function getTimestampSpec(headerAndRows: HeaderAndRows | null): TimestampSpec {
     timestampSpecs.find(ts => /time/i.test(ts.column)) || // Use a suggestion that has time in the name if possible
     timestampSpecs.find(ts => !NUMERIC_TIME_FORMATS.includes(ts.format)) || // Use a suggestion that is not numeric
     timestampSpecs[0] || // Fall back to the first one
-    getConstantTimestampSpec() // Ok, empty it is...
+    CONSTANT_TIMESTAMP_SPEC // Ok, empty it is...
   );
 }
 
@@ -300,7 +310,7 @@ export interface LoadDataViewState {
   // for timestamp
   timestampQueryState: QueryState<{
     headerAndRows: HeaderAndRows;
-    timestampSpec: TimestampSpec;
+    spec: IngestionSpec;
   }>;
 
   // for transform
@@ -454,7 +464,6 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
   private updateSpec = (newSpec: IngestionSpec) => {
     newSpec = normalizeSpec(newSpec);
-    newSpec = upgradeSpec(newSpec);
     newSpec = adjustIngestionSpec(newSpec);
     const deltaState: Partial<LoadDataViewState> = { spec: newSpec, specPreview: newSpec };
     if (!deepGet(newSpec, 'spec.ioConfig.type')) {
       deltaState.cacheRows = undefined;
@@ -470,7 +479,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
   private applyPreviewSpec = () => {
     this.setState(state => {
       localStorageSet(LocalStorageKeys.INGESTION_SPEC, JSON.stringify(state.specPreview));
-      return { spec: state.specPreview };
+      return { spec: Object.assign({}, state.specPreview) };
     });
   };
 
@@ -577,14 +586,15 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     );
   }
 
-  renderApplyButtonBar() {
+  renderApplyButtonBar(queryState: QueryState<unknown>) {
     const previewSpecSame = this.isPreviewSpecSame();
+    const queryStateHasError = Boolean(queryState && queryState.error);
 
     return (
       <FormGroup className="control-buttons">
         <Button
           text="Apply"
-          disabled={previewSpecSame}
+          disabled={previewSpecSame && !queryStateHasError}
           intent={Intent.PRIMARY}
           onClick={this.applyPreviewSpec}
         />
@@ -1047,7 +1057,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForConnect(spec, sampleStrategy);
     } catch (e) {
       this.setState({
-        inputQueryState: new QueryState({ error: e.message }),
+        inputQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1091,7 +1101,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (inputQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (inputQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${inputQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${inputQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (inputQueryState.data) {
       const inputData = inputQueryState.data.data;
       mainFill = (
@@ -1168,7 +1178,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
             </Callout>
           </FormGroup>
         )}
-        {(specType === 'kafka' || specType === 'kinesis') && (
+        {oneOf(specType, 'kafka', 'kinesis') && (
           <FormGroup label="Where should the data be sampled from?">
             <HTMLSelect
               value={sampleStrategy}
@@ -1179,7 +1189,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           </HTMLSelect>
         </FormGroup>
       )}
-      {this.renderApplyButtonBar()}
+      {this.renderApplyButtonBar(inputQueryState)}
     </div>
     {this.renderNextBar({
       disabled: !inputQueryState.data,
@@ -1278,7 +1288,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForParser(spec, sampleStrategy);
     } catch (e) {
      this.setState({
-        parserQueryState: new QueryState({ error: e.message }),
+        parserQueryState: new QueryState({ error: e }),
      });
      return;
     }
@@ -1315,7 +1325,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (parserQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (parserQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${parserQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${parserQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (parserQueryState.data) {
       mainFill = (
         <div className="table-with-control">
@@ -1380,13 +1390,13 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           {!selectedFlattenField && (
             <>
               <AutoForm
-                fields={getInputFormatFormFields()}
+                fields={INPUT_FORMAT_FIELDS}
                 model={inputFormat}
                 onChange={p =>
                   this.updateSpecPreview(deepSet(spec, 'spec.ioConfig.inputFormat', p))
                 }
               />
-              {this.renderApplyButtonBar()}
+              {this.renderApplyButtonBar(parserQueryState)}
             </>
           )}
           {this.renderFlattenControls()}
@@ -1461,7 +1471,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     return (
       <div className="edit-controls">
         <AutoForm
-          fields={getFlattenFieldFormFields()}
+          fields={FLATTEN_FIELD_FIELDS}
           model={selectedFlattenField}
           onChange={f => this.setState({ selectedFlattenField: f })}
         />
@@ -1529,7 +1539,6 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     const { spec, cacheRows } = this.state;
     const inputFormatColumns: string[] =
       deepGet(spec, 'spec.ioConfig.inputFormat.columns') || EMPTY_ARRAY;
-    const timestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec') || EMPTY_OBJECT;
 
     if (!cacheRows) {
       this.setState({
@@ -1549,7 +1558,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForTimestamp(spec, cacheRows);
     } catch (e) {
       this.setState({
-        timestampQueryState: new QueryState({ error: e.message }),
+        timestampQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1562,7 +1571,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           undefined,
           ['__time'].concat(inputFormatColumns),
         ),
-        timestampSpec,
+        spec,
       },
     }),
   });
@@ -1570,9 +1579,11 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
 
   renderTimestampStep() {
     const { specPreview: spec, columnFilter, specialColumnsOnly, timestampQueryState } = this.state;
+    const timestampSchema = getTimestampSchema(spec);
     const timestampSpec: TimestampSpec =
       deepGet(spec, 'spec.dataSchema.timestampSpec') || EMPTY_OBJECT;
-    const timestampSpecFromColumn = isColumnTimestampSpec(timestampSpec);
+    const transforms: Transform[] =
+      deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || EMPTY_ARRAY;
 
     let mainFill: JSX.Element | string = '';
     if (timestampQueryState.isInit()) {
@@ -1585,7 +1596,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (timestampQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (timestampQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${timestampQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${timestampQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (timestampQueryState.data) {
       mainFill = (
         <div className="table-with-control">
@@ -1622,46 +1633,88 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
          <Callout className="intro">
            <p>
              Druid partitions data based on the primary time column of your data. This column is
-              stored internally in Druid as <Code>__time</Code>. Please specify the primary time
-              column. If you do not have any time columns, you can choose "Constant value" to create
-              a default one.
+              stored internally in Druid as <Code>__time</Code>.
            </p>
+            <p>Configure how to define the time column for this data.</p>
+            <p>
+              If your data does not have a time column, you can select "None" to use a placeholder
+              value. If the time information is spread across multiple columns you can combine them
+              into one by selecting "Expression" and defining a transform expression.
+            </p>
            <LearnMore href={`${getLink('DOCS')}/ingestion/index.html#timestampspec`} />
          </Callout>
-          <FormGroup label="Timestamp spec">
+          <FormGroup label="Parse timestamp from">
            <ButtonGroup>
              <Button
-                text="From column"
-                active={timestampSpecFromColumn}
+                text="None"
+                active={timestampSchema === 'none'}
                onClick={() => {
+                  this.updateSpecPreview(
+                    deepSetMulti(spec, {
+                      'spec.dataSchema.timestampSpec': CONSTANT_TIMESTAMP_SPEC,
+                      'spec.dataSchema.transformSpec.transforms': removeTimestampTransform(
+                        transforms,
+                      ),
+                    }),
+                  );
+                }}
+              />
+              <Button
+                text="Column"
+                active={timestampSchema === 'column'}
+                onClick={() => {
                  const timestampSpec = {
                    column: 'timestamp',
                    format: 'auto',
                  };
                  this.updateSpecPreview(
-                    deepSet(spec, 'spec.dataSchema.timestampSpec', timestampSpec),
+                    deepSetMulti(spec, {
+                      'spec.dataSchema.timestampSpec': timestampSpec,
+                      'spec.dataSchema.transformSpec.transforms': removeTimestampTransform(
+                        transforms,
+                      ),
+                    }),
                  );
                }}
              />
              <Button
-                text="Constant value"
-                active={!timestampSpecFromColumn}
+                text="Expression"
+                active={timestampSchema === 'expression'}
                onClick={() => {
                  this.updateSpecPreview(
-                    deepSet(spec, 'spec.dataSchema.timestampSpec', getConstantTimestampSpec()),
+                    deepSetMulti(spec, {
+                      'spec.dataSchema.timestampSpec': CONSTANT_TIMESTAMP_SPEC,
+                      'spec.dataSchema.transformSpec.transforms': addTimestampTransform(transforms),
+                    }),
                  );
                }}
              />
            </ButtonGroup>
          </FormGroup>
-          <AutoForm
-            fields={getTimestampSpecFormFields(timestampSpec)}
-            model={timestampSpec}
-            onChange={timestampSpec => {
-              this.updateSpecPreview(deepSet(spec, 'spec.dataSchema.timestampSpec', timestampSpec));
-            }}
-          />
-          {this.renderApplyButtonBar()}
+          {timestampSchema === 'expression' ? (
+            <AutoForm
+              fields={getTimestampExpressionFields(transforms)}
+              model={transforms}
+              onChange={transforms => {
+                this.updateSpecPreview(
+                  deepSet(spec, 'spec.dataSchema.transformSpec.transforms', transforms),
+                );
+              }}
+            />
+          ) : (
+            <AutoForm
+              fields={
+                timestampSchema === 'none' ? CONSTANT_TIMESTAMP_SPEC_FIELDS : TIMESTAMP_SPEC_FIELDS
+              }
+              model={timestampSpec}
+              onChange={timestampSpec => {
+                this.updateSpecPreview(
+                  deepSet(spec, 'spec.dataSchema.timestampSpec', timestampSpec),
+                );
+              }}
+            />
+          )}
+          {this.renderApplyButtonBar(timestampQueryState)}
        </div>
        {this.renderNextBar({
          disabled: !timestampQueryState.data,
@@ -1700,7 +1753,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForTransform(spec, cacheRows);
     } catch (e) {
       this.setState({
-        transformQueryState: new QueryState({ error: e.message }),
+        transformQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1734,7 +1787,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (transformQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (transformQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${transformQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${transformQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (transformQueryState.data) {
       mainFill = (
         <div className="table-with-control">
@@ -1834,7 +1887,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     return (
       <div className="edit-controls">
         <AutoForm
-          fields={getTransformFormFields()}
+          fields={TRANSFORM_FIELDS}
           model={selectedTransform}
           onChange={selectedTransform => this.setState({ selectedTransform })}
         />
@@ -1915,7 +1968,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForFilter(spec, cacheRows);
     } catch (e) {
       this.setState({
-        filterQueryState: new QueryState({ error: e.message }),
+        filterQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1941,7 +1994,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponseNoFilter = await sampleForFilter(specNoFilter, cacheRows);
     } catch (e) {
       this.setState({
-        filterQueryState: new QueryState({ error: e.message }),
+        filterQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -1976,7 +2029,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (filterQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (filterQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${filterQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${filterQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (filterQueryState.data) {
       mainFill = (
         <div className="table-with-control">
@@ -2048,10 +2101,10 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     return (
       <div className="edit-controls">
         <AutoForm
-          fields={getFilterFormFields()}
+          fields={FILTER_FIELDS}
           model={selectedFilter}
           onChange={f => this.setState({ selectedFilter: f })}
-          showCustom={f => !['selector', 'in', 'regex', 'like', 'not'].includes(f.type)}
+          showCustom={f => !oneOf(f.type, 'selector', 'in', 'regex', 'like', 'not')}
         />
         <div className="control-buttons">
           <Button
@@ -2122,12 +2175,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
               label: 'Time intervals',
               type: 'string-array',
               placeholder: 'ex: 2018-01-01/2018-06-01',
-              info: (
-                <>
-                  A comma separated list of intervals for the raw data being ingested. Ignored for
-                  real-time ingestion.
-                </>
-              ),
+              info: <>A comma separated list of intervals for the raw data being ingested.</>,
             },
           ]}
           model={spec}
@@ -2202,7 +2250,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
       sampleResponse = await sampleForSchema(spec, cacheRows);
     } catch (e) {
       this.setState({
-        schemaQueryState: new QueryState({ error: e.message }),
+        schemaQueryState: new QueryState({ error: e }),
       });
       return;
     }
@@ -2242,7 +2290,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     } else if (schemaQueryState.isLoading()) {
       mainFill = <Loader />;
     } else if (schemaQueryState.error) {
-      mainFill = <CenterMessage>{`Error: ${schemaQueryState.error.message}`}</CenterMessage>;
+      mainFill = <CenterMessage>{`Error: ${schemaQueryState.getErrorMessage()}`}</CenterMessage>;
     } else if (schemaQueryState.data) {
       mainFill = (
         <div className="table-with-control">
@@ -2362,7 +2410,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
               <Switch
                 checked={rollup}
                 onChange={() => this.setState({ newRollup: !rollup })}
-                labelElement="Rollup"
+                label="Rollup"
               />
             </FormGroupWithInfo>
             <AutoForm
@@ -2394,6 +2442,17 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
         </div>
         {this.renderNextBar({
           disabled: !schemaQueryState.data,
+          onNextStep: () => {
+            let newSpec = spec;
+            if (rollup) {
+              newSpec = deepSet(newSpec, 'spec.tuningConfig.partitionsSpec', { type: 'hashed' });
+              newSpec = deepSet(newSpec, 'spec.tuningConfig.forceGuaranteedRollup', true);
+            } else {
+              newSpec = deepSet(newSpec, 'spec.tuningConfig.partitionsSpec', { type: 'dynamic' });
+              newSpec = deepDelete(newSpec, 'spec.tuningConfig.forceGuaranteedRollup');
+            }
+            this.updateSpec(newSpec);
+          },
         })}
       </>
     );
@@ -2544,7 +2603,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     return (
      <div className="edit-controls">
        <AutoForm
-          fields={getDimensionSpecFormFields()}
+          fields={DIMENSION_SPEC_FIELDS}
          model={selectedDimensionSpec}
          onChange={selectedDimensionSpec => this.setState({ selectedDimensionSpec })}
        />
@@ -2667,7 +2726,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     return (
      <div className="edit-controls">
        <AutoForm
-          fields={getMetricSpecFormFields()}
+          fields={METRIC_SPEC_FIELDS}
          model={selectedMetricSpec}
          onChange={selectedMetricSpec => this.setState({ selectedMetricSpec })}
        />
@@ -2742,6 +2801,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     const tuningConfig: TuningConfig = deepGet(spec, 'spec.tuningConfig') || EMPTY_OBJECT;
     const granularitySpec: GranularitySpec =
       deepGet(spec, 'spec.dataSchema.granularitySpec') || EMPTY_OBJECT;
+    const isStreaming = oneOf(spec.type, 'kafka', 'kinesis');
 
     return (
       <>
@@ -2774,25 +2834,25 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
            model={granularitySpec}
            onChange={g => this.updateSpec(deepSet(spec, 'spec.dataSchema.granularitySpec', g))}
          />
-          <AutoForm
-            fields={[
-              {
-                name: 'spec.dataSchema.granularitySpec.intervals',
-                label: 'Time intervals',
-                type: 'string-array',
-                placeholder: 'ex: 2018-01-01/2018-06-01',
-                required: spec => Boolean(deepGet(spec, 'spec.tuningConfig.forceGuaranteedRollup')),
-                info: (
-                  <>
-                    A comma separated list of intervals for the raw data being ingested. Ignored for
-                    real-time ingestion.
-                  </>
-                ),
-              },
-            ]}
-            model={spec}
-            onChange={s => this.updateSpec(s)}
-          />
+          {!isStreaming && (
+            <AutoForm
+              fields={[
+                {
+                  name: 'spec.dataSchema.granularitySpec.intervals',
+                  label: 'Time intervals',
+                  type: 'string-array',
+                  placeholder: 'ex: 2018-01-01/2018-06-01',
+                  required: spec =>
+                    ['hashed', 'single_dim'].includes(
+                      deepGet(spec, 'spec.tuningConfig.partitionsSpec.type'),
+                    ),
+                  info: <>A comma separated list of intervals for the raw data being ingested.</>,
+                },
+              ]}
+              model={spec}
+              onChange={s => this.updateSpec(s)}
+            />
+          )}
        </div>
        <div className="other">
          <H5>Secondary partitioning</H5>
@@ -2904,7 +2964,8 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
               label: 'Append to existing',
               type: 'boolean',
               defaultValue: false,
-              defined: spec => !deepGet(spec, 'spec.tuningConfig.forceGuaranteedRollup'),
+              defined: spec =>
+                deepGet(spec, 'spec.tuningConfig.partitionsSpec.type') === 'dynamic',
               info: (
                 <>
                   Creates segments as additional shards of the latest version, effectively

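Editor's note: a condensed sketch of the three timestamp schemas the rewritten step above toggles between (hypothetical spec fragments; CONSTANT_TIMESTAMP_SPEC, addTimestampTransform, and removeTimestampTransform are assumed to behave as the druid-models imports suggest):

// 'none'       -> placeholder constant timestamp, no __time transform
// 'column'     -> parse __time directly from a data column
// 'expression' -> constant timestamp spec plus a transform that computes __time
const noneSchema = {
  timestampSpec: CONSTANT_TIMESTAMP_SPEC,
  transforms: removeTimestampTransform(transforms),
};
const columnSchema = {
  timestampSpec: { column: 'timestamp', format: 'auto' },
  transforms: removeTimestampTransform(transforms),
};
const expressionSchema = {
  timestampSpec: CONSTANT_TIMESTAMP_SPEC,
  transforms: addTimestampTransform(transforms), // adds a '__time' transform entry
};
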
@@ -22,8 +22,8 @@ import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
 import { TableCellUnparseable } from '../../../components/table-cell-unparseable/table-cell-unparseable';
+import { FlattenField } from '../../../druid-models';
 import { caseInsensitiveContains, filterMap } from '../../../utils';
-import { FlattenField } from '../../../utils/ingestion-spec';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
 import './parse-data-table.scss';

@@ -19,7 +19,8 @@
 import { render } from '@testing-library/react';
 import React from 'react';
 
-import { getDummyTimestampSpec } from '../../../utils/ingestion-spec';
+import { IngestionSpec, PLACEHOLDER_TIMESTAMP_SPEC } from '../../../druid-models';
+import { deepSet } from '../../../utils';
 
 import { ParseTimeTable } from './parse-time-table';
 
@@ -35,11 +36,17 @@ describe('parse time table', () => {
     ],
   };
 
+  const spec = deepSet(
+    {} as IngestionSpec,
+    'spec.dataSchema.timestampSpec',
+    PLACEHOLDER_TIMESTAMP_SPEC,
+  );
+
   const parseTimeTable = (
     <ParseTimeTable
       sampleBundle={{
         headerAndRows: sampleData,
-        timestampSpec: getDummyTimestampSpec(),
+        spec,
       }}
      columnFilter=""
      possibleTimestampColumnsOnly={false}

@@ -22,13 +22,14 @@ import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
 import { TableCellUnparseable } from '../../../components/table-cell-unparseable/table-cell-unparseable';
-import { caseInsensitiveContains, filterMap } from '../../../utils';
-import { possibleDruidFormatForValues } from '../../../utils/druid-time';
 import {
-  getTimestampSpecColumn,
-  isColumnTimestampSpec,
+  getTimestampDetailFromSpec,
+  getTimestampSpecColumnFromSpec,
+  IngestionSpec,
+  possibleDruidFormatForValues,
   TimestampSpec,
-} from '../../../utils/ingestion-spec';
+} from '../../../druid-models';
+import { caseInsensitiveContains, filterMap } from '../../../utils';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
 import './parse-time-table.scss';
@@ -46,7 +47,7 @@ export function parseTimeTableSelectedColumnName(
 export interface ParseTimeTableProps {
   sampleBundle: {
     headerAndRows: HeaderAndRows;
-    timestampSpec: TimestampSpec;
+    spec: IngestionSpec;
   };
   columnFilter: string;
   possibleTimestampColumnsOnly: boolean;
@@ -62,9 +63,9 @@ export const ParseTimeTable = React.memo(function ParseTimeTable(props: ParseTim
     selectedColumnName,
     onTimestampColumnSelect,
   } = props;
-  const { headerAndRows, timestampSpec } = sampleBundle;
-  const timestampSpecColumn = getTimestampSpecColumn(timestampSpec);
-  const timestampSpecFromColumn = isColumnTimestampSpec(timestampSpec);
+  const { headerAndRows, spec } = sampleBundle;
+  const timestampSpecColumn = getTimestampSpecColumnFromSpec(spec);
+  const timestampDetail = getTimestampDetailFromSpec(spec);
 
   return (
     <ReactTable
@@ -73,27 +74,27 @@ export const ParseTimeTable = React.memo(function ParseTimeTable(props: ParseTim
       columns={filterMap(
         headerAndRows.header.length ? headerAndRows.header : ['__error__'],
         (columnName, i) => {
-          const timestamp = columnName === '__time';
-          if (!timestamp && !caseInsensitiveContains(columnName, columnFilter)) return;
-          const used = timestampSpec.column === columnName;
-          const possibleFormat = timestamp
+          const isTimestamp = columnName === '__time';
+          if (!isTimestamp && !caseInsensitiveContains(columnName, columnFilter)) return;
+          const used = timestampSpecColumn === columnName;
+          const possibleFormat = isTimestamp
             ? null
            : possibleDruidFormatForValues(
                filterMap(headerAndRows.rows, d => (d.parsed ? d.parsed[columnName] : undefined)),
              );
-          if (possibleTimestampColumnsOnly && !timestamp && !possibleFormat) return;
+          if (possibleTimestampColumnsOnly && !isTimestamp && !possibleFormat) return;
 
          const columnClassName = classNames({
-            timestamp,
+            timestamp: isTimestamp,
            used,
            selected: selectedColumnName === columnName,
          });
          return {
            Header: (
              <div
-                className={classNames({ clickable: !timestamp })}
+                className={classNames({ clickable: !isTimestamp })}
                onClick={
-                  timestamp
+                  isTimestamp
                    ? undefined
                    : () => {
                        onTimestampColumnSelect({
@@ -105,11 +106,7 @@ export const ParseTimeTable = React.memo(function ParseTimeTable(props: ParseTim
              >
                <div className="column-name">{columnName}</div>
                <div className="column-detail">
-                  {timestamp
-                    ? timestampSpecFromColumn
-                      ? `from: '${timestampSpecColumn}'`
-                      : `mv: ${timestampSpec.missingValue}`
-                    : possibleFormat || ''}
+                  {isTimestamp ? timestampDetail : possibleFormat || ''}
 
                </div>
              </div>
@@ -123,12 +120,12 @@ export const ParseTimeTable = React.memo(function ParseTimeTable(props: ParseTim
                return <TableCell value={row.original.error} />;
              }
              if (row.original.unparseable) {
-                return <TableCellUnparseable timestamp={timestamp} />;
+                return <TableCellUnparseable timestamp={isTimestamp} />;
              }
-              return <TableCell value={timestamp ? new Date(row.value) : row.value} />;
+              return <TableCell value={isTimestamp ? new Date(row.value) : row.value} />;
            },
-            minWidth: timestamp ? 200 : 100,
-            resizable: !timestamp,
+            minWidth: isTimestamp ? 200 : 100,
+            resizable: !isTimestamp,
          };
        },
      )}

@@ -21,7 +21,6 @@ import React from 'react';
 import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
-import { caseInsensitiveContains, filterMap, sortWithPrefixSuffix } from '../../../utils';
 import {
   DimensionSpec,
   DimensionsSpec,
@@ -30,7 +29,8 @@ import {
   getMetricSpecName,
   inflateDimensionSpec,
   MetricSpec,
-} from '../../../utils/ingestion-spec';
+} from '../../../druid-models';
+import { caseInsensitiveContains, filterMap, sortWithPrefixSuffix } from '../../../utils';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
 import './schema-table.scss';
@@ -99,7 +99,7 @@ export const SchemaTable = React.memo(function SchemaTable(props: SchemaTablePro
           className: columnClassName,
           id: String(i),
           accessor: (row: SampleEntry) => (row.parsed ? row.parsed[columnName] : null),
-          Cell: row => <TableCell value={row.value} />,
+          Cell: ({ value }) => <TableCell value={value} />,
         };
       } else {
         const timestamp = columnName === '__time';

@@ -21,9 +21,9 @@ import React from 'react';
 import ReactTable from 'react-table';
 
 import { TableCell } from '../../../components';
+import { Transform } from '../../../druid-models';
 import { caseInsensitiveContains, filterMap } from '../../../utils';
 import { escapeColumnName } from '../../../utils/druid-expression';
-import { Transform } from '../../../utils/ingestion-spec';
 import { HeaderAndRows, SampleEntry } from '../../../utils/sampler';
 
 import './transform-table.scss';

@@ -32,8 +32,8 @@ import {
   ViewControlBar,
 } from '../../components';
 import { AsyncActionDialog, LookupEditDialog } from '../../dialogs/';
-import { LookupSpec } from '../../dialogs/lookup-edit-dialog/lookup-edit-dialog';
 import { LookupTableActionDialog } from '../../dialogs/lookup-table-action-dialog/lookup-table-action-dialog';
+import { LookupSpec } from '../../druid-models';
 import { AppToaster } from '../../singletons/toaster';
 import {
   getDruidErrorMessage,
@@ -167,7 +167,7 @@ export class LookupsView extends React.PureComponent<LookupsViewProps, LookupsVi
     const lookupEntriesAndTiers = lookupEntriesAndTiersState.data;
     if (!lookupEntriesAndTiers) return;
 
-    const target: any = lookupEntriesAndTiers.lookupEntries.find((lookupEntry: any) => {
+    const target: any = lookupEntriesAndTiers.lookupEntries.find(lookupEntry => {
       return lookupEntry.tier === tier && lookupEntry.id === id;
     });
     if (id === '') {
@@ -179,7 +179,7 @@ export class LookupsView extends React.PureComponent<LookupsViewProps, LookupsVi
       lookupEdit: {
         name: '',
         tier: loadingEntriesAndTiers ? loadingEntriesAndTiers.tiers[0] : '',
-        spec: { type: '' },
+        spec: { type: 'map', map: {} },
         version: new Date().toISOString(),
       },
     };

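The second lookups-view hunk replaces the invalid placeholder spec { type: '' } with a well-formed empty map lookup, so the edit dialog opens on something that can actually validate. For reference, a populated map lookup spec has this shape (the sample entries are illustrative, not from the commit):

// A minimal Druid 'map' lookup spec: static key -> value pairs.
const lookupSpec = {
  type: 'map',
  map: {
    US: 'United States', // sample data
    FR: 'France',
  },
};
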
@@ -31,8 +31,7 @@ import React, { ChangeEvent } from 'react';
 
 import { Loader } from '../../../components';
 import { Deferred } from '../../../components/deferred/deferred';
-import { copyAndAlert, groupBy, prettyPrintSql } from '../../../utils';
-import { ColumnMetadata } from '../../../utils/column-metadata';
+import { ColumnMetadata, copyAndAlert, groupBy, oneOf, prettyPrintSql } from '../../../utils';
 import { dataTypeToIcon } from '../query-utils';
 
 import { NumberMenuItems, StringMenuItems, TimeMenuItems } from './column-tree-menu';
@@ -340,8 +339,7 @@ export class ColumnTree extends React.PureComponent<ColumnTreeProps, ColumnTreeS
             }}
           />
           {parsedQuery &&
-            (columnData.DATA_TYPE === 'BIGINT' ||
-              columnData.DATA_TYPE === 'FLOAT') && (
+            oneOf(columnData.DATA_TYPE, 'BIGINT', 'FLOAT') && (
               <NumberMenuItems
                 table={tableName}
                 schema={schemaName}

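oneOf is the small utility this commit introduces to collapse chained equality checks (it shows up again in the services view below). Its implementation is not part of this diff; presumably it is close to:

// Sketch: true if value equals any of the listed options.
function oneOf<T>(value: T, ...options: T[]): boolean {
  return options.includes(value);
}

// usage, as in the hunk above:
// oneOf(columnData.DATA_TYPE, 'BIGINT', 'FLOAT')
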
@@ -31,9 +31,8 @@ import ReactTable from 'react-table';
 
 import { BracedText, TableCell } from '../../../components';
 import { ShowValueDialog } from '../../../dialogs/show-value-dialog/show-value-dialog';
-import { copyAndAlert, filterMap, prettyPrintSql } from '../../../utils';
+import { copyAndAlert, deepSet, filterMap, prettyPrintSql } from '../../../utils';
 import { BasicAction, basicActionsToMenu } from '../../../utils/basic-action';
-import { deepSet } from '../../../utils/object-change';
 
 import { ColumnRenameInput } from './column-rename-input/column-rename-input';
 

@@ -48,6 +48,8 @@ exports[`segments-view matches snapshot 1`] = `
     "Start",
     "End",
     "Version",
+    "Time span",
+    "Partitioning",
     "Partition",
     "Size",
     "Num rows",
@@ -163,6 +165,22 @@ exports[`segments-view matches snapshot 1`] = `
       "show": true,
       "width": 120,
     },
+    Object {
+      "Cell": [Function],
+      "Header": "Time span",
+      "accessor": "time_span",
+      "filterable": true,
+      "show": true,
+      "width": 100,
+    },
+    Object {
+      "Cell": [Function],
+      "Header": "Partitioning",
+      "accessor": "partitioning",
+      "filterable": true,
+      "show": true,
+      "width": 100,
+    },
     Object {
       "Header": "Partition",
       "accessor": "partition_num",

@@ -19,6 +19,7 @@
 import { Button, ButtonGroup, Intent, Label, MenuItem } from '@blueprintjs/core';
 import { IconNames } from '@blueprintjs/icons';
 import axios from 'axios';
+import { SqlExpression, SqlRef } from 'druid-query-toolkit';
 import React from 'react';
 import ReactTable, { Filter } from 'react-table';
 
@@ -38,6 +39,7 @@ import { SegmentTableActionDialog } from '../../dialogs/segments-table-action-di
 import {
   addFilter,
+  compact,
   deepGet,
   filterMap,
   formatBytes,
   formatInteger,
@@ -61,6 +63,8 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
     'Start',
     'End',
     'Version',
+    'Time span',
+    'Partitioning',
     'Partition',
     'Size',
     'Num rows',
@@ -87,6 +91,7 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
     'Start',
     'End',
     'Version',
+    'Partitioning',
     'Partition',
     'Size',
     'Num rows',
@@ -127,7 +132,9 @@ interface SegmentQueryResultRow {
   end: string;
   segment_id: string;
   version: string;
-  size: 0;
+  time_span: string;
+  partitioning: string;
+  size: number;
   partition_num: number;
   num_rows: number;
   num_replicas: number;

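One detail in the interface hunk is easy to miss: the old declaration size: 0; was a literal type, not a default value, so TypeScript would only ever accept the number 0 for that field. Redeclaring it as size: number; fixes the typing, while the new time_span and partitioning fields back the two new columns. The pitfall in isolation:

// '0' in a type position is the literal type containing only the value 0.
interface Bad {
  size: 0;
}
// const b: Bad = { size: 100 }; // error: '100' is not assignable to '0'

interface Good {
  size: number;
}
const g: Good = { size: 100 }; // fine
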
@@ -153,6 +160,31 @@ export interface SegmentsViewState {
 export class SegmentsView extends React.PureComponent<SegmentsViewProps, SegmentsViewState> {
   static PAGE_SIZE = 25;
 
+  static WITH_QUERY = `WITH s AS (
+  SELECT
+    "segment_id", "datasource", "start", "end", "size", "version",
+    CASE
+      WHEN "start" LIKE '%-01-01T00:00:00.000Z' AND "end" LIKE '%-01-01T00:00:00.000Z' THEN 'Year'
+      WHEN "start" LIKE '%-01T00:00:00.000Z' AND "end" LIKE '%-01T00:00:00.000Z' THEN 'Month'
+      WHEN "start" LIKE '%T00:00:00.000Z' AND "end" LIKE '%T00:00:00.000Z' THEN 'Day'
+      WHEN "start" LIKE '%:00:00.000Z' AND "end" LIKE '%:00:00.000Z' THEN 'Hour'
+      WHEN "start" LIKE '%:00.000Z' AND "end" LIKE '%:00.000Z' THEN 'Minute'
+      ELSE 'Sub minute'
+    END AS "time_span",
+    CASE
+      WHEN "shard_spec" LIKE '%"type":"numbered"%' THEN 'dynamic'
+      WHEN "shard_spec" LIKE '%"type":"hashed"%' THEN 'hashed'
+      WHEN "shard_spec" LIKE '%"type":"single"%' THEN 'single_dim'
+      WHEN "shard_spec" LIKE '%"type":"none"%' THEN 'none'
+      WHEN "shard_spec" LIKE '%"type":"linear"%' THEN 'linear'
+      WHEN "shard_spec" LIKE '%"type":"numbered_overwrite"%' THEN 'numbered_overwrite'
+      ELSE '-'
+    END AS "partitioning",
+    "partition_num", "num_replicas", "num_rows",
+    "is_published", "is_available", "is_realtime", "is_overshadowed"
+  FROM sys.segments
+)`;
+
   private segmentsSqlQueryManager: QueryManager<SegmentsQuery, SegmentQueryResultRow[]>;
   private segmentsNoSqlQueryManager: QueryManager<null, SegmentQueryResultRow[]>;
 

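The new CTE classifies each segment by string-matching its ISO-8601 boundaries: a year-aligned segment starts and ends on '-01-01T00:00:00.000Z', a month-aligned one on '-01T00:00:00.000Z', and so on down to minutes, with everything else bucketed as 'Sub minute'; partitioning is read the same way from the type tag inside the shard_spec JSON. The console does this in SQL as shown, but the suffix test is easy to state client-side; a sketch (the helper name and structure are mine):

// Sketch: classify a segment's time span from its boundary strings.
const SPAN_SUFFIXES: Array<[string, string]> = [
  ['-01-01T00:00:00.000Z', 'Year'],
  ['-01T00:00:00.000Z', 'Month'],
  ['T00:00:00.000Z', 'Day'],
  [':00:00.000Z', 'Hour'],
  [':00.000Z', 'Minute'],
];

function guessTimeSpan(start: string, end: string): string {
  for (const [suffix, span] of SPAN_SUFFIXES) {
    if (start.endsWith(suffix) && end.endsWith(suffix)) return span;
  }
  return 'Sub minute';
}

// guessTimeSpan('2020-01-01T00:00:00.000Z', '2021-01-01T00:00:00.000Z') -> 'Year'
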
@@ -178,12 +210,10 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
     this.segmentsSqlQueryManager = new QueryManager({
       debounceIdle: 500,
       processQuery: async (query: SegmentsQuery, _cancelToken, setIntermediateQuery) => {
-        const totalQuerySize = (query.page + 1) * query.pageSize;
-
         const whereParts = filterMap(query.filtered, (f: Filter) => {
           if (f.id.startsWith('is_')) {
             if (f.value === 'all') return;
-            return `${JSON.stringify(f.id)} = ${f.value === 'true' ? 1 : 0}`;
+            return SqlRef.columnWithQuotes(f.id).equal(f.value === 'true' ? 1 : 0);
           } else {
             return sqlQueryCustomTableFilter(f);
           }

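Filter predicates are now built with druid-query-toolkit instead of hand-interpolated strings, which takes care of identifier quoting. Only the two calls visible in this diff are assumed below:

import { SqlExpression, SqlRef } from 'druid-query-toolkit';

// one boolean filter as a typed expression rather than a string:
const part = SqlRef.columnWithQuotes('is_published').equal(1);

// parts later combine into a single WHERE expression (next hunk):
const where = SqlExpression.and(part).toString(); // renders roughly: "is_published" = 1
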
@@ -193,17 +223,18 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
 
         let whereClause = '';
         if (whereParts.length) {
-          whereClause = whereParts.join(' AND ');
+          whereClause = SqlExpression.and(...whereParts).toString();
         }
 
         if (query.groupByInterval) {
           const innerQuery = compact([
             `SELECT "start" || '/' || "end" AS "interval"`,
             `FROM sys.segments`,
-            whereClause ? `WHERE ${whereClause}` : '',
+            whereClause ? `WHERE ${whereClause}` : undefined,
             `GROUP BY 1`,
             `ORDER BY 1 DESC`,
-            `LIMIT ${totalQuerySize}`,
+            `LIMIT ${query.pageSize}`,
+            query.page ? `OFFSET ${query.page * query.pageSize}` : undefined,
           ]).join('\n');
 
           const intervals: string = (await queryDruidSql({ query: innerQuery }))

@@ -211,10 +242,9 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
             .join(', ');
 
           queryParts = compact([
-            `SELECT`,
-            ` ("start" || '/' || "end") AS "interval",`,
-            ` "segment_id", "datasource", "start", "end", "size", "version", "partition_num", "num_replicas", "num_rows", "is_published", "is_available", "is_realtime", "is_overshadowed"`,
-            `FROM sys.segments`,
+            SegmentsView.WITH_QUERY,
+            `SELECT "start" || '/' || "end" AS "interval", *`,
+            `FROM s`,
             `WHERE`,
             intervals ? ` ("start" || '/' || "end") IN (${intervals})` : 'FALSE',
             whereClause ? ` AND ${whereClause}` : '',
@@ -229,12 +259,9 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
           );
         }
 
-        queryParts.push(`LIMIT ${totalQuerySize * 1000}`);
+        queryParts.push(`LIMIT ${query.pageSize * 1000}`);
       } else {
-        queryParts = [
-          `SELECT "segment_id", "datasource", "start", "end", "size", "version", "partition_num", "num_replicas", "num_rows", "is_published", "is_available", "is_realtime", "is_overshadowed"`,
-          `FROM sys.segments`,
-        ];
+        queryParts = [SegmentsView.WITH_QUERY, `SELECT *`, `FROM s`];
 
         if (whereClause) {
           queryParts.push(`WHERE ${whereClause}`);

@@ -249,11 +276,15 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
           );
         }
 
-        queryParts.push(`LIMIT ${totalQuerySize}`);
+        queryParts.push(`LIMIT ${query.pageSize}`);
+
+        if (query.page) {
+          queryParts.push(`OFFSET ${query.page * query.pageSize}`);
+        }
       }
       const sqlQuery = queryParts.join('\n');
       setIntermediateQuery(sqlQuery);
-      return (await queryDruidSql({ query: sqlQuery })).slice(query.page * query.pageSize);
+      return await queryDruidSql({ query: sqlQuery });
     },
     onStateChange: segmentsState => {
       this.setState({

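Taken together, these hunks switch the paging model from 'fetch everything up to the requested page, then slice client-side' to real server-side paging: LIMIT is one page of rows and OFFSET skips the earlier pages (the 'use OFFSET in segments view query' item from the commit message). The arithmetic in isolation:

// Sketch: translate a zero-based page index into LIMIT/OFFSET clauses.
function pageClauses(page: number, pageSize: number): string[] {
  const clauses = [`LIMIT ${pageSize}`];
  if (page) clauses.push(`OFFSET ${page * pageSize}`);
  return clauses;
}

// pageClauses(0, 25) -> ['LIMIT 25']
// pageClauses(3, 25) -> ['LIMIT 25', 'OFFSET 75']
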
@@ -270,23 +301,27 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
         const segments = (await axios.get(`/druid/coordinator/v1/datasources/${d}?full`)).data
           .segments;
 
-        return segments.map((segment: any) => {
-          return {
-            segment_id: segment.identifier,
-            datasource: segment.dataSource,
-            start: segment.interval.split('/')[0],
-            end: segment.interval.split('/')[1],
-            version: segment.version,
-            partition_num: segment.shardSpec.partitionNum ? 0 : segment.shardSpec.partitionNum,
-            size: segment.size,
-            num_rows: -1,
-            num_replicas: -1,
-            is_available: -1,
-            is_published: -1,
-            is_realtime: -1,
-            is_overshadowed: -1,
-          };
-        });
+        return segments.map(
+          (segment: any): SegmentQueryResultRow => {
+            return {
+              segment_id: segment.identifier,
+              datasource: segment.dataSource,
+              start: segment.interval.split('/')[0],
+              end: segment.interval.split('/')[1],
+              version: segment.version,
+              time_span: '-',
+              partitioning: '-',
+              partition_num: deepGet(segment, 'shardSpec.partitionNum') || 0,
+              size: segment.size,
+              num_rows: -1,
+              num_replicas: -1,
+              is_available: -1,
+              is_published: -1,
+              is_realtime: -1,
+              is_overshadowed: -1,
+            };
+          },
+        );
       }),
     );
 

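The rewritten mapper also fixes a genuine bug: the old ternary segment.shardSpec.partitionNum ? 0 : segment.shardSpec.partitionNum was inverted, yielding 0 exactly when a partition number existed, and it would throw if shardSpec was absent. deepGet(segment, 'shardSpec.partitionNum') || 0 reads the nested field safely and defaults to 0. A minimal sketch of such a helper (the console's own deepGet lives in its utils; this shape is an assumption):

// Sketch: null-safe nested property access by dotted path.
function deepGet(obj: any, path: string): any {
  return path.split('.').reduce((o, key) => (o == null ? undefined : o[key]), obj);
}

deepGet({ shardSpec: { partitionNum: 3 } }, 'shardSpec.partitionNum'); // 3
deepGet({}, 'shardSpec.partitionNum'); // undefined, so '|| 0' yields 0
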
@@ -387,6 +422,23 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
 
     const numRowsValues = segments.map(d => formatInteger(d.num_rows)).concat('(unknown)');
 
+    const renderFilterableCell = (field: string) => {
+      return (row: { value: any }) => {
+        const value = row.value;
+        return (
+          <a
+            onClick={() => {
+              this.setState({
+                segmentFilter: addFilter(segmentFilter, field, value),
+              });
+            }}
+          >
+            {value}
+          </a>
+        );
+      };
+    };
+
     return (
       <ReactTable
         data={segments}

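renderFilterableCell is a renderer factory: called once per column with a field name, it returns the Cell function react-table invokes per row, closing over segmentFilter and this.setState. The four copy-pasted inline renderers in the hunks below collapse to one-liners like Cell: renderFilterableCell('datasource'). The shape of the pattern, detached from the component (setFilter stands in for the setState call):

import React from 'react';

// Sketch: a factory returning a react-table Cell renderer.
const makeFilterableCell = (field: string, setFilter: (field: string, value: any) => void) => {
  return ({ value }: { value: any }) => (
    <a onClick={() => setFilter(field, value)}>{value}</a>
  );
};
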
@@ -421,18 +473,7 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
           Header: 'Datasource',
           show: hiddenColumns.exists('Datasource'),
           accessor: 'datasource',
-          Cell: row => {
-            const value = row.value;
-            return (
-              <a
-                onClick={() => {
-                  this.setState({ segmentFilter: addFilter(segmentFilter, 'datasource', value) });
-                }}
-              >
-                {value}
-              </a>
-            );
-          },
+          Cell: renderFilterableCell('datasource'),
         },
         {
           Header: 'Interval',
@@ -440,18 +481,7 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
           accessor: 'interval',
           width: 120,
           defaultSortDesc: true,
-          Cell: row => {
-            const value = row.value;
-            return (
-              <a
-                onClick={() => {
-                  this.setState({ segmentFilter: addFilter(segmentFilter, 'interval', value) });
-                }}
-              >
-                {value}
-              </a>
-            );
-          },
+          Cell: renderFilterableCell('interval'),
         },
         {
           Header: 'Start',
@@ -459,18 +489,7 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
           accessor: 'start',
           width: 120,
           defaultSortDesc: true,
-          Cell: row => {
-            const value = row.value;
-            return (
-              <a
-                onClick={() => {
-                  this.setState({ segmentFilter: addFilter(segmentFilter, 'start', value) });
-                }}
-              >
-                {value}
-              </a>
-            );
-          },
+          Cell: renderFilterableCell('start'),
         },
         {
           Header: 'End',
@@ -478,18 +497,7 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
           accessor: 'end',
           defaultSortDesc: true,
           width: 120,
-          Cell: row => {
-            const value = row.value;
-            return (
-              <a
-                onClick={() => {
-                  this.setState({ segmentFilter: addFilter(segmentFilter, 'end', value) });
-                }}
-              >
-                {value}
-              </a>
-            );
-          },
+          Cell: renderFilterableCell('end'),
         },
         {
           Header: 'Version',

@@ -498,6 +506,22 @@ export class SegmentsView extends React.PureComponent<SegmentsViewProps, Segment
           defaultSortDesc: true,
           width: 120,
         },
+        {
+          Header: 'Time span',
+          show: capabilities.hasSql() && hiddenColumns.exists('Time span'),
+          accessor: 'time_span',
+          width: 100,
+          filterable: true,
+          Cell: renderFilterableCell('time_span'),
+        },
+        {
+          Header: 'Partitioning',
+          show: capabilities.hasSql() && hiddenColumns.exists('Partitioning'),
+          accessor: 'partitioning',
+          width: 100,
+          filterable: true,
+          Cell: renderFilterableCell('partitioning'),
+        },
         {
           Header: 'Partition',
           show: hiddenColumns.exists('Partition'),

@@ -26,12 +26,7 @@ import { ServicesView } from './services-view';
 describe('services view', () => {
   it('action services view', () => {
     const servicesView = shallow(
-      <ServicesView
-        middleManager={'test'}
-        goToQuery={() => {}}
-        goToTask={() => {}}
-        capabilities={Capabilities.FULL}
-      />,
+      <ServicesView goToQuery={() => {}} goToTask={() => {}} capabilities={Capabilities.FULL} />,
     );
     expect(servicesView).toMatchSnapshot();
   });

@@ -37,18 +37,20 @@ import {
 import { AsyncActionDialog } from '../../dialogs';
 import {
   addFilter,
+  Capabilities,
+  CapabilitiesMode,
+  deepGet,
   formatBytes,
   formatBytesCompact,
   LocalStorageKeys,
   lookupBy,
+  oneOf,
   queryDruidSql,
   QueryManager,
   QueryState,
 } from '../../utils';
 import { BasicAction } from '../../utils/basic-action';
-import { Capabilities, CapabilitiesMode } from '../../utils/capabilities';
 import { LocalStorageBackedArray } from '../../utils/local-storage-backed-array';
-import { deepGet } from '../../utils/object-change';
 
 import './services-view.scss';
 

@@ -92,7 +94,6 @@ function formatQueues(
 }
 
 export interface ServicesViewProps {
-  middleManager: string | undefined;
   goToQuery: (initSql: string) => void;
   goToTask: (taskId: string) => void;
   capabilities: Capabilities;

@@ -326,8 +327,7 @@ ORDER BY "rank" DESC, "service" DESC`;
         show: hiddenColumns.exists('Type'),
         accessor: 'service_type',
         width: 150,
-        Cell: row => {
-          const value = row.value;
+        Cell: ({ value }) => {
           return (
             <a
               onClick={() => {
@@ -348,8 +348,7 @@ ORDER BY "rank" DESC, "service" DESC`;
         accessor: row => {
           return row.tier ? row.tier : row.worker ? row.worker.category : null;
         },
-        Cell: row => {
-          const value = row.value;
+        Cell: ({ value }) => {
           return (
             <a
               onClick={() => {

@@ -428,7 +427,7 @@ ORDER BY "rank" DESC, "service" DESC`;
         width: 100,
         filterable: false,
         accessor: row => {
-          if (row.service_type === 'middle_manager' || row.service_type === 'indexer') {
+          if (oneOf(row.service_type, 'middle_manager', 'indexer')) {
             return row.worker ? (row.currCapacityUsed || 0) / row.worker.capacity : null;
           } else {
             return row.max_size ? row.curr_size / row.max_size : null;
@@ -488,7 +487,7 @@ ORDER BY "rank" DESC, "service" DESC`;
         width: 400,
         filterable: false,
         accessor: row => {
-          if (row.service_type === 'middle_manager' || row.service_type === 'indexer') {
+          if (oneOf(row.service_type, 'middle_manager', 'indexer')) {
             if (deepGet(row, 'worker.version') === '') return 'Disabled';
 
             const details: string[] = [];

@@ -551,10 +550,10 @@ ORDER BY "rank" DESC, "service" DESC`;
         width: ACTION_COLUMN_WIDTH,
         accessor: row => row.worker,
         filterable: false,
-        Cell: row => {
-          if (!row.value) return null;
-          const disabled = row.value.version === '';
-          const workerActions = this.getWorkerActions(row.value.host, disabled);
+        Cell: ({ value }) => {
+          if (!value) return null;
+          const disabled = value.version === '';
+          const workerActions = this.getWorkerActions(value.host, disabled);
           return <ActionCell actions={workerActions} />;
         },
       },

@@ -61,7 +61,7 @@ module.exports = env => {
   },
   target: 'web',
   resolve: {
-    extensions: ['.tsx', '.ts', '.html', '.js', '.json', '.scss', '.css'],
+    extensions: ['.tsx', '.ts', '.js', '.scss', '.css'],
   },
   devServer: {
     publicPath: '/public',

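With the trimmed resolve.extensions list, extension-less imports now only probe .tsx, .ts, .js, .scss, and .css; anything else (for example .html or .json) must spell out its extension. Illustration (the paths are made up):

// resolves by probing data-loader.tsx, then .ts, then .js, etc.:
import { DataLoader } from './load-data/data-loader';

// no longer resolved implicitly; the extension must now be explicit:
// import spec from './example-spec.json';
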