mirror of https://github.com/apache/druid.git
Web console: standardize the spec format (#9477)
* standardize the spec format
* fix spec upgrade
This commit is contained in:
parent 1ba1a3c523
commit ddc6f87920
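In short: the web console previously kept ioConfig, tuningConfig and dataSchema at the top level of the spec it edited; after this change everything except the top-level type is nested under a spec key, matching the layout of a task payload. A rough before/after sketch (TypeScript; field values are illustrative, taken from fixtures that appear in this diff):

// Before: flat layout edited by the console.
const legacySpec = {
  type: 'index_parallel',
  ioConfig: { type: 'index_parallel', inputSource: { type: 'inline', data: '{"test":"Data"}' } },
  tuningConfig: { type: 'index_parallel' },
  dataSchema: { dataSource: 'wikipedia' /* timestampSpec, dimensionsSpec, ... */ },
};

// After: standardized layout, everything but `type` nested under `spec`.
const standardizedSpec = {
  type: 'index_parallel',
  spec: {
    ioConfig: { type: 'index_parallel', inputSource: { type: 'inline', data: '{"test":"Data"}' } },
    tuningConfig: { type: 'index_parallel' },
    dataSchema: { dataSource: 'wikipedia' /* timestampSpec, dimensionsSpec, ... */ },
  },
};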
@@ -232,7 +232,6 @@ export const DOCTOR_CHECKS: DoctorCheck[] = [
       {
         type: 'index_parallel',
         spec: {
-          type: 'index_parallel',
           ioConfig: {
             type: 'index_parallel',
             inputSource: { type: 'inline', data: '{"test":"Data"}' },
@ -2,74 +2,76 @@
|
|||
|
||||
exports[`ingestion-spec upgrades 1`] = `
|
||||
Object {
|
||||
"dataSchema": Object {
|
||||
"dataSource": "wikipedia",
|
||||
"dimensionsSpec": Object {
|
||||
"dimensions": Array [
|
||||
"channel",
|
||||
"cityName",
|
||||
"comment",
|
||||
],
|
||||
},
|
||||
"granularitySpec": Object {
|
||||
"queryGranularity": "HOUR",
|
||||
"rollup": true,
|
||||
"segmentGranularity": "DAY",
|
||||
"type": "uniform",
|
||||
},
|
||||
"metricsSpec": Array [
|
||||
Object {
|
||||
"name": "count",
|
||||
"type": "count",
|
||||
"spec": Object {
|
||||
"dataSchema": Object {
|
||||
"dataSource": "wikipedia",
|
||||
"dimensionsSpec": Object {
|
||||
"dimensions": Array [
|
||||
"channel",
|
||||
"cityName",
|
||||
"comment",
|
||||
],
|
||||
},
|
||||
Object {
|
||||
"fieldName": "added",
|
||||
"name": "sum_added",
|
||||
"type": "longSum",
|
||||
"granularitySpec": Object {
|
||||
"queryGranularity": "HOUR",
|
||||
"rollup": true,
|
||||
"segmentGranularity": "DAY",
|
||||
"type": "uniform",
|
||||
},
|
||||
],
|
||||
"timestampSpec": Object {
|
||||
"column": "timestamp",
|
||||
"format": "iso",
|
||||
},
|
||||
"transformSpec": Object {
|
||||
"filter": Object {
|
||||
"dimension": "commentLength",
|
||||
"type": "selector",
|
||||
"value": "35",
|
||||
},
|
||||
"transforms": Array [
|
||||
"metricsSpec": Array [
|
||||
Object {
|
||||
"expression": "concat(\\"channel\\", 'lol')",
|
||||
"name": "channel",
|
||||
"type": "expression",
|
||||
"name": "count",
|
||||
"type": "count",
|
||||
},
|
||||
Object {
|
||||
"fieldName": "added",
|
||||
"name": "sum_added",
|
||||
"type": "longSum",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
"ioConfig": Object {
|
||||
"inputFormat": Object {
|
||||
"flattenSpec": Object {
|
||||
"fields": Array [
|
||||
"timestampSpec": Object {
|
||||
"column": "timestamp",
|
||||
"format": "iso",
|
||||
},
|
||||
"transformSpec": Object {
|
||||
"filter": Object {
|
||||
"dimension": "commentLength",
|
||||
"type": "selector",
|
||||
"value": "35",
|
||||
},
|
||||
"transforms": Array [
|
||||
Object {
|
||||
"expr": "$.cityName",
|
||||
"name": "cityNameAlt",
|
||||
"type": "path",
|
||||
"expression": "concat(\\"channel\\", 'lol')",
|
||||
"name": "channel",
|
||||
"type": "expression",
|
||||
},
|
||||
],
|
||||
},
|
||||
"type": "json",
|
||||
},
|
||||
"inputSource": Object {
|
||||
"type": "http",
|
||||
"uris": Array [
|
||||
"https://static.imply.io/data/wikipedia.json.gz",
|
||||
],
|
||||
"ioConfig": Object {
|
||||
"inputFormat": Object {
|
||||
"flattenSpec": Object {
|
||||
"fields": Array [
|
||||
Object {
|
||||
"expr": "$.cityName",
|
||||
"name": "cityNameAlt",
|
||||
"type": "path",
|
||||
},
|
||||
],
|
||||
},
|
||||
"type": "json",
|
||||
},
|
||||
"inputSource": Object {
|
||||
"type": "http",
|
||||
"uris": Array [
|
||||
"https://static.imply.io/data/wikipedia.json.gz",
|
||||
],
|
||||
},
|
||||
"type": "index_parallel",
|
||||
},
|
||||
"tuningConfig": Object {
|
||||
"type": "index_parallel",
|
||||
},
|
||||
"type": "index_parallel",
|
||||
},
|
||||
"tuningConfig": Object {
|
||||
"type": "index_parallel",
|
||||
},
|
||||
"type": "index_parallel",
|
||||
}
|
||||
|
|
|
@@ -88,28 +88,28 @@ export function updateSchemaWithSample(
   let newSpec = spec;

   if (dimensionMode === 'auto-detect') {
-    newSpec = deepSet(newSpec, 'dataSchema.dimensionsSpec.dimensions', []);
+    newSpec = deepSet(newSpec, 'spec.dataSchema.dimensionsSpec.dimensions', []);
   } else {
-    newSpec = deepDelete(newSpec, 'dataSchema.dimensionsSpec.dimensionExclusions');
+    newSpec = deepDelete(newSpec, 'spec.dataSchema.dimensionsSpec.dimensionExclusions');

     const dimensions = getDimensionSpecs(headerAndRows, rollup);
     if (dimensions) {
-      newSpec = deepSet(newSpec, 'dataSchema.dimensionsSpec.dimensions', dimensions);
+      newSpec = deepSet(newSpec, 'spec.dataSchema.dimensionsSpec.dimensions', dimensions);
     }
   }

   if (rollup) {
-    newSpec = deepSet(newSpec, 'dataSchema.granularitySpec.queryGranularity', 'HOUR');
+    newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.queryGranularity', 'HOUR');

     const metrics = getMetricSecs(headerAndRows);
     if (metrics) {
-      newSpec = deepSet(newSpec, 'dataSchema.metricsSpec', metrics);
+      newSpec = deepSet(newSpec, 'spec.dataSchema.metricsSpec', metrics);
     }
   } else {
-    newSpec = deepSet(newSpec, 'dataSchema.granularitySpec.queryGranularity', 'NONE');
-    newSpec = deepDelete(newSpec, 'dataSchema.metricsSpec');
+    newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.queryGranularity', 'NONE');
+    newSpec = deepDelete(newSpec, 'spec.dataSchema.metricsSpec');
   }

-  newSpec = deepSet(newSpec, 'dataSchema.granularitySpec.rollup', rollup);
+  newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec.rollup', rollup);
   return newSpec;
 }
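All of the path changes in this commit go through the console's deepGet / deepSet / deepDelete / deepMove helpers, which address nested spec fields with dotted path strings (an '[append]' segment pushes onto an array, as used further down for metricsSpec and dimensions). A minimal sketch of the idea, not the real implementation:

// Hypothetical simplified helpers for illustration only.
function deepGetSketch(obj: any, path: string): any {
  return path.split('.').reduce((o, key) => (o == null ? undefined : o[key]), obj);
}

function deepSetSketch(obj: any, path: string, value: any): any {
  // Returns a copy with the value written at the dotted path, creating objects as needed.
  const [head, ...rest] = path.split('.');
  return {
    ...obj,
    [head]: rest.length ? deepSetSketch((obj || {})[head] || {}, rest.join('.'), value) : value,
  };
}

// With the standardized format every dataSchema/ioConfig/tuningConfig path simply gains a
// leading 'spec.' segment, e.g.:
//   deepSetSketch(spec, 'spec.dataSchema.granularitySpec.rollup', true)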
@ -21,71 +21,73 @@ import { downgradeSpec, upgradeSpec } from './ingestion-spec';
|
|||
describe('ingestion-spec', () => {
|
||||
const oldSpec = {
|
||||
type: 'index_parallel',
|
||||
ioConfig: {
|
||||
type: 'index_parallel',
|
||||
firehose: {
|
||||
type: 'http',
|
||||
uris: ['https://static.imply.io/data/wikipedia.json.gz'],
|
||||
},
|
||||
},
|
||||
tuningConfig: {
|
||||
type: 'index_parallel',
|
||||
},
|
||||
dataSchema: {
|
||||
dataSource: 'wikipedia',
|
||||
granularitySpec: {
|
||||
type: 'uniform',
|
||||
segmentGranularity: 'DAY',
|
||||
queryGranularity: 'HOUR',
|
||||
rollup: true,
|
||||
},
|
||||
parser: {
|
||||
type: 'string',
|
||||
parseSpec: {
|
||||
format: 'json',
|
||||
timestampSpec: {
|
||||
column: 'timestamp',
|
||||
format: 'iso',
|
||||
},
|
||||
dimensionsSpec: {
|
||||
dimensions: ['channel', 'cityName', 'comment'],
|
||||
},
|
||||
flattenSpec: {
|
||||
fields: [
|
||||
{
|
||||
type: 'path',
|
||||
name: 'cityNameAlt',
|
||||
expr: '$.cityName',
|
||||
},
|
||||
],
|
||||
},
|
||||
spec: {
|
||||
ioConfig: {
|
||||
type: 'index_parallel',
|
||||
firehose: {
|
||||
type: 'http',
|
||||
uris: ['https://static.imply.io/data/wikipedia.json.gz'],
|
||||
},
|
||||
},
|
||||
transformSpec: {
|
||||
transforms: [
|
||||
tuningConfig: {
|
||||
type: 'index_parallel',
|
||||
},
|
||||
dataSchema: {
|
||||
dataSource: 'wikipedia',
|
||||
granularitySpec: {
|
||||
type: 'uniform',
|
||||
segmentGranularity: 'DAY',
|
||||
queryGranularity: 'HOUR',
|
||||
rollup: true,
|
||||
},
|
||||
parser: {
|
||||
type: 'string',
|
||||
parseSpec: {
|
||||
format: 'json',
|
||||
timestampSpec: {
|
||||
column: 'timestamp',
|
||||
format: 'iso',
|
||||
},
|
||||
dimensionsSpec: {
|
||||
dimensions: ['channel', 'cityName', 'comment'],
|
||||
},
|
||||
flattenSpec: {
|
||||
fields: [
|
||||
{
|
||||
type: 'path',
|
||||
name: 'cityNameAlt',
|
||||
expr: '$.cityName',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
transformSpec: {
|
||||
transforms: [
|
||||
{
|
||||
type: 'expression',
|
||||
name: 'channel',
|
||||
expression: 'concat("channel", \'lol\')',
|
||||
},
|
||||
],
|
||||
filter: {
|
||||
type: 'selector',
|
||||
dimension: 'commentLength',
|
||||
value: '35',
|
||||
},
|
||||
},
|
||||
metricsSpec: [
|
||||
{
|
||||
type: 'expression',
|
||||
name: 'channel',
|
||||
expression: 'concat("channel", \'lol\')',
|
||||
name: 'count',
|
||||
type: 'count',
|
||||
},
|
||||
{
|
||||
name: 'sum_added',
|
||||
type: 'longSum',
|
||||
fieldName: 'added',
|
||||
},
|
||||
],
|
||||
filter: {
|
||||
type: 'selector',
|
||||
dimension: 'commentLength',
|
||||
value: '35',
|
||||
},
|
||||
},
|
||||
metricsSpec: [
|
||||
{
|
||||
name: 'count',
|
||||
type: 'count',
|
||||
},
|
||||
{
|
||||
name: 'sum_added',
|
||||
type: 'longSum',
|
||||
fieldName: 'added',
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
|
|
|
@@ -40,7 +40,11 @@ export const EMPTY_ARRAY: any[] = [];
 const CURRENT_YEAR = new Date().getUTCFullYear();

 export interface IngestionSpec {
-  type?: IngestionType;
+  type: IngestionType;
+  spec: IngestionSpecInner;
+}
+
+export interface IngestionSpecInner {
   ioConfig: IoConfig;
   dataSchema: DataSchema;
   tuningConfig?: TuningConfig;
@@ -69,8 +73,9 @@ export type IngestionComboType =
 export type IngestionComboTypeWithExtra = IngestionComboType | 'hadoop' | 'example' | 'other';

 export function adjustIngestionSpec(spec: IngestionSpec) {
-  if (spec.tuningConfig) {
-    spec = deepSet(spec, 'tuningConfig', adjustTuningConfig(spec.tuningConfig));
+  const tuningConfig = deepGet(spec, 'spec.tuningConfig');
+  if (tuningConfig) {
+    spec = deepSet(spec, 'spec.tuningConfig', adjustTuningConfig(tuningConfig));
   }
   return spec;
 }
@@ -88,7 +93,7 @@ function ingestionTypeToIoAndTuningConfigType(ingestionType: IngestionType): str
 }

 export function getIngestionComboType(spec: IngestionSpec): IngestionComboType | undefined {
-  const ioConfig = deepGet(spec, 'ioConfig') || EMPTY_OBJECT;
+  const ioConfig = deepGet(spec, 'spec.ioConfig') || EMPTY_OBJECT;

   switch (ioConfig.type) {
     case 'kafka':
@@ -96,7 +101,7 @@ export function getIngestionComboType(spec: IngestionSpec): IngestionComboType |
       return ioConfig.type;

     case 'index_parallel':
-      const inputSource = deepGet(spec, 'ioConfig.inputSource') || EMPTY_OBJECT;
+      const inputSource = deepGet(spec, 'spec.ioConfig.inputSource') || EMPTY_OBJECT;
       switch (inputSource.type) {
         case 'local':
         case 'http':
@@ -230,20 +235,20 @@ export interface InputFormat {
 export type DimensionMode = 'specific' | 'auto-detect';

 export function getDimensionMode(spec: IngestionSpec): DimensionMode {
-  const dimensions = deepGet(spec, 'dataSchema.dimensionsSpec.dimensions') || EMPTY_ARRAY;
+  const dimensions = deepGet(spec, 'spec.dataSchema.dimensionsSpec.dimensions') || EMPTY_ARRAY;
   return Array.isArray(dimensions) && dimensions.length === 0 ? 'auto-detect' : 'specific';
 }

 export function getRollup(spec: IngestionSpec): boolean {
-  const specRollup = deepGet(spec, 'dataSchema.granularitySpec.rollup');
+  const specRollup = deepGet(spec, 'spec.dataSchema.granularitySpec.rollup');
   return typeof specRollup === 'boolean' ? specRollup : true;
 }

 export function getSpecType(spec: Partial<IngestionSpec>): IngestionType {
   return (
     deepGet(spec, 'type') ||
-    deepGet(spec, 'ioConfig.type') ||
-    deepGet(spec, 'tuningConfig.type') ||
+    deepGet(spec, 'spec.ioConfig.type') ||
+    deepGet(spec, 'spec.tuningConfig.type') ||
     'index_parallel'
   );
 }
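For example, with the standardized layout getSpecType falls back through the nested locations before defaulting (illustrative; the `as any` casts only keep the sketch short):

const t1 = getSpecType({ type: 'kafka' } as any); // 'kafka'
const t2 = getSpecType({ spec: { ioConfig: { type: 'kafka' } } } as any); // 'kafka'
const t3 = getSpecType({} as any); // 'index_parallel'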
@@ -257,7 +262,7 @@ export function isTask(spec: IngestionSpec) {
 }

 export function isDruidSource(spec: IngestionSpec): boolean {
-  return deepGet(spec, 'ioConfig.inputSource.type') === 'druid';
+  return deepGet(spec, 'spec.ioConfig.inputSource.type') === 'druid';
 }

 /**
@@ -271,14 +276,21 @@ export function normalizeSpec(spec: Partial<IngestionSpec>): IngestionSpec {
   }

   // Make sure that if we actually get a task payload we extract the spec
-  if (typeof (spec as any).spec === 'object') spec = (spec as any).spec;
+  if (typeof spec.spec !== 'object' && typeof (spec as any).ioConfig === 'object') {
+    spec = { spec: spec as any };
+  }

   const specType =
-    deepGet(spec, 'type') || deepGet(spec, 'ioConfig.type') || deepGet(spec, 'tuningConfig.type');
+    deepGet(spec, 'type') ||
+    deepGet(spec, 'spec.ioConfig.type') ||
+    deepGet(spec, 'spec.tuningConfig.type');

   if (!specType) return spec as IngestionSpec;
   if (!deepGet(spec, 'type')) spec = deepSet(spec, 'type', specType);
-  if (!deepGet(spec, 'ioConfig.type')) spec = deepSet(spec, 'ioConfig.type', specType);
-  if (!deepGet(spec, 'tuningConfig.type')) spec = deepSet(spec, 'tuningConfig.type', specType);
+  if (!deepGet(spec, 'spec.ioConfig.type')) spec = deepSet(spec, 'spec.ioConfig.type', specType);
+  if (!deepGet(spec, 'spec.tuningConfig.type')) {
+    spec = deepSet(spec, 'spec.tuningConfig.type', specType);
+  }
   return spec as IngestionSpec;
 }
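Roughly, normalizeSpec now accepts either layout and always returns the nested one (sketch; key order and exact helper behavior simplified):

const normalized = normalizeSpec({
  ioConfig: { type: 'index_parallel', inputSource: { type: 'inline', data: '{"test":"Data"}' } },
  dataSchema: { dataSource: 'wikipedia' },
} as any);
// normalized is roughly:
// {
//   spec: {
//     ioConfig: { type: 'index_parallel', inputSource: { ... } },
//     dataSchema: { dataSource: 'wikipedia' },
//     tuningConfig: { type: 'index_parallel' },
//   },
//   type: 'index_parallel',
// }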
@@ -1862,11 +1874,11 @@ function basenameFromFilename(filename: string): string | undefined {
 export function fillDataSourceNameIfNeeded(spec: IngestionSpec): IngestionSpec {
   const possibleName = guessDataSourceName(spec);
   if (!possibleName) return spec;
-  return deepSet(spec, 'dataSchema.dataSource', possibleName);
+  return deepSet(spec, 'spec.dataSchema.dataSource', possibleName);
 }

 export function guessDataSourceName(spec: IngestionSpec): string | undefined {
-  const ioConfig = deepGet(spec, 'ioConfig');
+  const ioConfig = deepGet(spec, 'spec.ioConfig');
   if (!ioConfig) return;

   switch (ioConfig.type) {
@@ -2489,22 +2501,22 @@ export function updateIngestionType(
   let newSpec = spec;
   newSpec = deepSet(newSpec, 'type', ingestionType);
-  newSpec = deepSet(newSpec, 'ioConfig.type', ioAndTuningConfigType);
-  newSpec = deepSet(newSpec, 'tuningConfig.type', ioAndTuningConfigType);
+  newSpec = deepSet(newSpec, 'spec.ioConfig.type', ioAndTuningConfigType);
+  newSpec = deepSet(newSpec, 'spec.tuningConfig.type', ioAndTuningConfigType);

   if (inputSourceType) {
-    newSpec = deepSet(newSpec, 'ioConfig.inputSource', { type: inputSourceType });
+    newSpec = deepSet(newSpec, 'spec.ioConfig.inputSource', { type: inputSourceType });

     if (inputSourceType === 'local') {
-      newSpec = deepSet(newSpec, 'ioConfig.inputSource.filter', '*');
+      newSpec = deepSet(newSpec, 'spec.ioConfig.inputSource.filter', '*');
     }
   }

-  if (!deepGet(spec, 'dataSchema.dataSource')) {
-    newSpec = deepSet(newSpec, 'dataSchema.dataSource', 'new-data-source');
+  if (!deepGet(spec, 'spec.dataSchema.dataSource')) {
+    newSpec = deepSet(newSpec, 'spec.dataSchema.dataSource', 'new-data-source');
   }

-  if (!deepGet(spec, 'dataSchema.granularitySpec')) {
+  if (!deepGet(spec, 'spec.dataSchema.granularitySpec')) {
     const granularitySpec: GranularitySpec = {
       type: 'uniform',
       queryGranularity: 'HOUR',
@@ -2513,22 +2525,22 @@ export function updateIngestionType(
       granularitySpec.segmentGranularity = 'HOUR';
     }

-    newSpec = deepSet(newSpec, 'dataSchema.granularitySpec', granularitySpec);
+    newSpec = deepSet(newSpec, 'spec.dataSchema.granularitySpec', granularitySpec);
   }

-  if (!deepGet(spec, 'dataSchema.timestampSpec')) {
-    newSpec = deepSet(newSpec, 'dataSchema.timestampSpec', getDummyTimestampSpec());
+  if (!deepGet(spec, 'spec.dataSchema.timestampSpec')) {
+    newSpec = deepSet(newSpec, 'spec.dataSchema.timestampSpec', getDummyTimestampSpec());
   }

-  if (!deepGet(spec, 'dataSchema.dimensionsSpec')) {
-    newSpec = deepSet(newSpec, 'dataSchema.dimensionsSpec', {});
+  if (!deepGet(spec, 'spec.dataSchema.dimensionsSpec')) {
+    newSpec = deepSet(newSpec, 'spec.dataSchema.dimensionsSpec', {});
   }

   return newSpec;
 }

 export function fillInputFormat(spec: IngestionSpec, sampleData: string[]): IngestionSpec {
-  return deepSet(spec, 'ioConfig.inputFormat', guessInputFormat(sampleData));
+  return deepSet(spec, 'spec.ioConfig.inputFormat', guessInputFormat(sampleData));
 }

 function guessInputFormat(sampleData: string[]): InputFormat {
@@ -2681,53 +2693,61 @@ export function getFilterFormFields() {
 }

 export function upgradeSpec(spec: any): any {
-  if (deepGet(spec, 'ioConfig.firehose')) {
-    switch (deepGet(spec, 'ioConfig.firehose.type')) {
+  if (deepGet(spec, 'spec.ioConfig.firehose')) {
+    switch (deepGet(spec, 'spec.ioConfig.firehose.type')) {
       case 'static-s3':
-        deepSet(spec, 'ioConfig.firehose.type', 's3');
+        deepSet(spec, 'spec.ioConfig.firehose.type', 's3');
         break;

       case 'static-google-blobstore':
-        deepSet(spec, 'ioConfig.firehose.type', 'google');
-        deepMove(spec, 'ioConfig.firehose.blobs', 'ioConfig.firehose.objects');
+        deepSet(spec, 'spec.ioConfig.firehose.type', 'google');
+        deepMove(spec, 'spec.ioConfig.firehose.blobs', 'spec.ioConfig.firehose.objects');
         break;
     }

-    spec = deepMove(spec, 'ioConfig.firehose', 'ioConfig.inputSource');
-    spec = deepMove(spec, 'dataSchema.parser.parseSpec.timestampSpec', 'dataSchema.timestampSpec');
+    spec = deepMove(spec, 'spec.ioConfig.firehose', 'spec.ioConfig.inputSource');
     spec = deepMove(
       spec,
-      'dataSchema.parser.parseSpec.dimensionsSpec',
-      'dataSchema.dimensionsSpec',
+      'spec.dataSchema.parser.parseSpec.timestampSpec',
+      'spec.dataSchema.timestampSpec',
     );
-    spec = deepMove(spec, 'dataSchema.parser.parseSpec', 'ioConfig.inputFormat');
-    spec = deepDelete(spec, 'dataSchema.parser');
-    spec = deepMove(spec, 'ioConfig.inputFormat.format', 'ioConfig.inputFormat.type');
+    spec = deepMove(
+      spec,
+      'spec.dataSchema.parser.parseSpec.dimensionsSpec',
+      'spec.dataSchema.dimensionsSpec',
+    );
+    spec = deepMove(spec, 'spec.dataSchema.parser.parseSpec', 'spec.ioConfig.inputFormat');
+    spec = deepDelete(spec, 'spec.dataSchema.parser');
+    spec = deepMove(spec, 'spec.ioConfig.inputFormat.format', 'spec.ioConfig.inputFormat.type');
   }
   return spec;
 }

 export function downgradeSpec(spec: any): any {
-  if (deepGet(spec, 'ioConfig.inputSource')) {
-    spec = deepMove(spec, 'ioConfig.inputFormat.type', 'ioConfig.inputFormat.format');
-    spec = deepSet(spec, 'dataSchema.parser', { type: 'string' });
-    spec = deepMove(spec, 'ioConfig.inputFormat', 'dataSchema.parser.parseSpec');
+  if (deepGet(spec, 'spec.ioConfig.inputSource')) {
+    spec = deepMove(spec, 'spec.ioConfig.inputFormat.type', 'spec.ioConfig.inputFormat.format');
+    spec = deepSet(spec, 'spec.dataSchema.parser', { type: 'string' });
+    spec = deepMove(spec, 'spec.ioConfig.inputFormat', 'spec.dataSchema.parser.parseSpec');
     spec = deepMove(
       spec,
-      'dataSchema.dimensionsSpec',
-      'dataSchema.parser.parseSpec.dimensionsSpec',
+      'spec.dataSchema.dimensionsSpec',
+      'spec.dataSchema.parser.parseSpec.dimensionsSpec',
     );
-    spec = deepMove(spec, 'dataSchema.timestampSpec', 'dataSchema.parser.parseSpec.timestampSpec');
-    spec = deepMove(spec, 'ioConfig.inputSource', 'ioConfig.firehose');
+    spec = deepMove(
+      spec,
+      'spec.dataSchema.timestampSpec',
+      'spec.dataSchema.parser.parseSpec.timestampSpec',
+    );
+    spec = deepMove(spec, 'spec.ioConfig.inputSource', 'spec.ioConfig.firehose');

-    switch (deepGet(spec, 'ioConfig.firehose.type')) {
+    switch (deepGet(spec, 'spec.ioConfig.firehose.type')) {
       case 's3':
-        deepSet(spec, 'ioConfig.firehose.type', 'static-s3');
+        deepSet(spec, 'spec.ioConfig.firehose.type', 'static-s3');
         break;

       case 'google':
-        deepSet(spec, 'ioConfig.firehose.type', 'static-google-blobstore');
-        deepMove(spec, 'ioConfig.firehose.objects', 'ioConfig.firehose.blobs');
+        deepSet(spec, 'spec.ioConfig.firehose.type', 'static-google-blobstore');
+        deepMove(spec, 'spec.ioConfig.firehose.objects', 'spec.ioConfig.firehose.blobs');
         break;
     }
   }
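This is the "fix spec upgrade" part of the commit: upgradeSpec and downgradeSpec now read and write the nested layout. On a legacy firehose/parser spec the upgrade moves are, roughly:

// spec.ioConfig.firehose ('static-s3' renamed to 's3',
//   'static-google-blobstore' to 'google')            -> spec.ioConfig.inputSource
// spec.dataSchema.parser.parseSpec.timestampSpec      -> spec.dataSchema.timestampSpec
// spec.dataSchema.parser.parseSpec.dimensionsSpec     -> spec.dataSchema.dimensionsSpec
// spec.dataSchema.parser.parseSpec (format: 'json')   -> spec.ioConfig.inputFormat (type: 'json')
//
// downgradeSpec applies the same moves in reverse, restoring the firehose/parser form.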
@@ -44,11 +44,9 @@ const BASE_SAMPLER_CONFIG: SamplerConfig = {
   timeoutMs: 15000,
 };

-export interface SampleSpec {
-  type: string;
-  spec: IngestionSpec;
+export type SampleSpec = IngestionSpec & {
   samplerConfig: SamplerConfig;
-}
+};

 export interface SamplerConfig {
   numRows?: number;
@@ -207,7 +205,7 @@ function makeSamplerIoConfig(
   This is a hack to deal with the fact that the sampler can not deal with the index_parallel type
 */
 function fixSamplerTypes(sampleSpec: SampleSpec): SampleSpec {
-  let samplerType: string = getSpecType(sampleSpec.spec);
+  let samplerType: string = getSpecType(sampleSpec);
   if (samplerType === 'index_parallel') {
     samplerType = 'index';
   }
@@ -244,7 +242,7 @@ export async function sampleForConnect(
 ): Promise<SampleResponseWithExtraInfo> {
   const samplerType = getSpecType(spec);
   let ioConfig: IoConfig = makeSamplerIoConfig(
-    deepGet(spec, 'ioConfig'),
+    deepGet(spec, 'spec.ioConfig'),
     samplerType,
     sampleStrategy,
   );
@@ -308,7 +306,7 @@ export async function sampleForParser(
 ): Promise<SampleResponse> {
   const samplerType = getSpecType(spec);
   const ioConfig: IoConfig = makeSamplerIoConfig(
-    deepGet(spec, 'ioConfig'),
+    deepGet(spec, 'spec.ioConfig'),
     samplerType,
     sampleStrategy,
   );
@@ -316,7 +314,6 @@ export async function sampleForParser(
   const sampleSpec: SampleSpec = {
     type: samplerType,
     spec: {
-      type: samplerType,
       ioConfig,
       dataSchema: {
         dataSource: 'sample',
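With SampleSpec now defined as IngestionSpec plus a samplerConfig, a sampler request body carries the same nesting as a regular ingestion spec. An illustrative object (values taken from fixtures in this diff, shape only):

const exampleSampleSpec = {
  type: 'index',
  spec: {
    ioConfig: {
      type: 'index',
      inputSource: { type: 'inline', data: '{"test":"Data"}' },
      inputFormat: { type: 'json' },
    },
    dataSchema: {
      dataSource: 'sample',
      timestampSpec: { column: 'timestamp', format: 'iso' },
      dimensionsSpec: {},
    },
  },
  samplerConfig: { numRows: 500, timeoutMs: 15000 },
};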
@ -335,15 +332,14 @@ export async function sampleForTimestamp(
|
|||
cacheRows: CacheRows,
|
||||
): Promise<SampleResponse> {
|
||||
const samplerType = getSpecType(spec);
|
||||
const timestampSpec: TimestampSpec = deepGet(spec, 'dataSchema.timestampSpec');
|
||||
const timestampSpec: TimestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec');
|
||||
const columnTimestampSpec = isColumnTimestampSpec(timestampSpec);
|
||||
|
||||
// First do a query with a static timestamp spec
|
||||
const sampleSpecColumns: SampleSpec = {
|
||||
type: samplerType,
|
||||
spec: {
|
||||
type: samplerType,
|
||||
ioConfig: deepGet(spec, 'ioConfig'),
|
||||
ioConfig: deepGet(spec, 'spec.ioConfig'),
|
||||
dataSchema: {
|
||||
dataSource: 'sample',
|
||||
dimensionsSpec: {},
|
||||
|
@ -366,8 +362,7 @@ export async function sampleForTimestamp(
|
|||
const sampleSpec: SampleSpec = {
|
||||
type: samplerType,
|
||||
spec: {
|
||||
type: samplerType,
|
||||
ioConfig: deepGet(spec, 'ioConfig'),
|
||||
ioConfig: deepGet(spec, 'spec.ioConfig'),
|
||||
dataSchema: {
|
||||
dataSource: 'sample',
|
||||
dimensionsSpec: {},
|
||||
|
@ -402,9 +397,9 @@ export async function sampleForTransform(
|
|||
cacheRows: CacheRows,
|
||||
): Promise<SampleResponse> {
|
||||
const samplerType = getSpecType(spec);
|
||||
const inputFormatColumns: string[] = deepGet(spec, 'ioConfig.inputFormat.columns') || [];
|
||||
const timestampSpec: TimestampSpec = deepGet(spec, 'dataSchema.timestampSpec');
|
||||
const transforms: Transform[] = deepGet(spec, 'dataSchema.transformSpec.transforms') || [];
|
||||
const inputFormatColumns: string[] = deepGet(spec, 'spec.ioConfig.inputFormat.columns') || [];
|
||||
const timestampSpec: TimestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec');
|
||||
const transforms: Transform[] = deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || [];
|
||||
|
||||
// Extra step to simulate auto detecting dimension with transforms
|
||||
const specialDimensionSpec: DimensionsSpec = {};
|
||||
|
@ -412,8 +407,7 @@ export async function sampleForTransform(
|
|||
const sampleSpecHack: SampleSpec = {
|
||||
type: samplerType,
|
||||
spec: {
|
||||
type: samplerType,
|
||||
ioConfig: deepGet(spec, 'ioConfig'),
|
||||
ioConfig: deepGet(spec, 'spec.ioConfig'),
|
||||
dataSchema: {
|
||||
dataSource: 'sample',
|
||||
timestampSpec,
|
||||
|
@ -440,8 +434,7 @@ export async function sampleForTransform(
|
|||
const sampleSpec: SampleSpec = {
|
||||
type: samplerType,
|
||||
spec: {
|
||||
type: samplerType,
|
||||
ioConfig: deepGet(spec, 'ioConfig'),
|
||||
ioConfig: deepGet(spec, 'spec.ioConfig'),
|
||||
dataSchema: {
|
||||
dataSource: 'sample',
|
||||
timestampSpec,
|
||||
|
@ -462,10 +455,10 @@ export async function sampleForFilter(
|
|||
cacheRows: CacheRows,
|
||||
): Promise<SampleResponse> {
|
||||
const samplerType = getSpecType(spec);
|
||||
const inputFormatColumns: string[] = deepGet(spec, 'ioConfig.inputFormat.columns') || [];
|
||||
const timestampSpec: TimestampSpec = deepGet(spec, 'dataSchema.timestampSpec');
|
||||
const transforms: Transform[] = deepGet(spec, 'dataSchema.transformSpec.transforms') || [];
|
||||
const filter: any = deepGet(spec, 'dataSchema.transformSpec.filter');
|
||||
const inputFormatColumns: string[] = deepGet(spec, 'spec.ioConfig.inputFormat.columns') || [];
|
||||
const timestampSpec: TimestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec');
|
||||
const transforms: Transform[] = deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || [];
|
||||
const filter: any = deepGet(spec, 'spec.dataSchema.transformSpec.filter');
|
||||
|
||||
// Extra step to simulate auto detecting dimension with transforms
|
||||
const specialDimensionSpec: DimensionsSpec = {};
|
||||
|
@ -473,8 +466,7 @@ export async function sampleForFilter(
|
|||
const sampleSpecHack: SampleSpec = {
|
||||
type: samplerType,
|
||||
spec: {
|
||||
type: samplerType,
|
||||
ioConfig: deepGet(spec, 'ioConfig'),
|
||||
ioConfig: deepGet(spec, 'spec.ioConfig'),
|
||||
dataSchema: {
|
||||
dataSource: 'sample',
|
||||
timestampSpec,
|
||||
|
@ -501,8 +493,7 @@ export async function sampleForFilter(
|
|||
const sampleSpec: SampleSpec = {
|
||||
type: samplerType,
|
||||
spec: {
|
||||
type: samplerType,
|
||||
ioConfig: deepGet(spec, 'ioConfig'),
|
||||
ioConfig: deepGet(spec, 'spec.ioConfig'),
|
||||
dataSchema: {
|
||||
dataSource: 'sample',
|
||||
timestampSpec,
|
||||
|
@ -524,19 +515,18 @@ export async function sampleForSchema(
|
|||
cacheRows: CacheRows,
|
||||
): Promise<SampleResponse> {
|
||||
const samplerType = getSpecType(spec);
|
||||
const timestampSpec: TimestampSpec = deepGet(spec, 'dataSchema.timestampSpec');
|
||||
const timestampSpec: TimestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec');
|
||||
const transformSpec: TransformSpec =
|
||||
deepGet(spec, 'dataSchema.transformSpec') || ({} as TransformSpec);
|
||||
const dimensionsSpec: DimensionsSpec = deepGet(spec, 'dataSchema.dimensionsSpec');
|
||||
const metricsSpec: MetricSpec[] = deepGet(spec, 'dataSchema.metricsSpec') || [];
|
||||
deepGet(spec, 'spec.dataSchema.transformSpec') || ({} as TransformSpec);
|
||||
const dimensionsSpec: DimensionsSpec = deepGet(spec, 'spec.dataSchema.dimensionsSpec');
|
||||
const metricsSpec: MetricSpec[] = deepGet(spec, 'spec.dataSchema.metricsSpec') || [];
|
||||
const queryGranularity: string =
|
||||
deepGet(spec, 'dataSchema.granularitySpec.queryGranularity') || 'NONE';
|
||||
deepGet(spec, 'spec.dataSchema.granularitySpec.queryGranularity') || 'NONE';
|
||||
|
||||
const sampleSpec: SampleSpec = {
|
||||
type: samplerType,
|
||||
spec: {
|
||||
type: samplerType,
|
||||
ioConfig: deepGet(spec, 'ioConfig'),
|
||||
ioConfig: deepGet(spec, 'spec.ioConfig'),
|
||||
dataSchema: {
|
||||
dataSource: 'sample',
|
||||
timestampSpec,
|
||||
|
@ -560,7 +550,6 @@ export async function sampleForExampleManifests(
|
|||
const exampleSpec: SampleSpec = {
|
||||
type: 'index_parallel',
|
||||
spec: {
|
||||
type: 'index_parallel',
|
||||
ioConfig: {
|
||||
type: 'index_parallel',
|
||||
inputSource: { type: 'http', uris: [exampleManifestUrl] },
|
||||
|
|
|
@ -30,31 +30,33 @@ import { applyCache, headerFromSampleResponse } from './sampler';
|
|||
describe('test-utils', () => {
|
||||
const ingestionSpec: IngestionSpec = {
|
||||
type: 'index_parallel',
|
||||
ioConfig: {
|
||||
type: 'index_parallel',
|
||||
inputSource: {
|
||||
type: 'http',
|
||||
uris: ['https://static.imply.io/data/wikipedia.json.gz'],
|
||||
spec: {
|
||||
ioConfig: {
|
||||
type: 'index_parallel',
|
||||
inputSource: {
|
||||
type: 'http',
|
||||
uris: ['https://static.imply.io/data/wikipedia.json.gz'],
|
||||
},
|
||||
inputFormat: {
|
||||
type: 'json',
|
||||
},
|
||||
},
|
||||
inputFormat: {
|
||||
type: 'json',
|
||||
tuningConfig: {
|
||||
type: 'index_parallel',
|
||||
},
|
||||
},
|
||||
tuningConfig: {
|
||||
type: 'index_parallel',
|
||||
},
|
||||
dataSchema: {
|
||||
dataSource: 'wikipedia',
|
||||
granularitySpec: {
|
||||
type: 'uniform',
|
||||
segmentGranularity: 'DAY',
|
||||
queryGranularity: 'HOUR',
|
||||
dataSchema: {
|
||||
dataSource: 'wikipedia',
|
||||
granularitySpec: {
|
||||
type: 'uniform',
|
||||
segmentGranularity: 'DAY',
|
||||
queryGranularity: 'HOUR',
|
||||
},
|
||||
timestampSpec: {
|
||||
column: 'timestamp',
|
||||
format: 'iso',
|
||||
},
|
||||
dimensionsSpec: {},
|
||||
},
|
||||
timestampSpec: {
|
||||
column: 'timestamp',
|
||||
format: 'iso',
|
||||
},
|
||||
dimensionsSpec: {},
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -72,14 +74,12 @@ describe('test-utils', () => {
|
|||
it('spec-utils applyCache', () => {
|
||||
expect(
|
||||
applyCache(
|
||||
{
|
||||
type: 'index_parallel',
|
||||
spec: ingestionSpec,
|
||||
Object.assign({}, ingestionSpec, {
|
||||
samplerConfig: {
|
||||
numRows: 500,
|
||||
timeoutMs: 15000,
|
||||
},
|
||||
},
|
||||
}),
|
||||
[{ make: 'Honda', model: 'Accord' }, { make: 'Toyota', model: 'Prius' }],
|
||||
),
|
||||
).toMatchInlineSnapshot(`
|
||||
|
@ -157,31 +157,33 @@ describe('test-utils', () => {
|
|||
describe('druid-type.ts', () => {
|
||||
const ingestionSpec: IngestionSpec = {
|
||||
type: 'index_parallel',
|
||||
ioConfig: {
|
||||
type: 'index_parallel',
|
||||
inputSource: {
|
||||
type: 'http',
|
||||
uris: ['https://static.imply.io/data/wikipedia.json.gz'],
|
||||
spec: {
|
||||
ioConfig: {
|
||||
type: 'index_parallel',
|
||||
inputSource: {
|
||||
type: 'http',
|
||||
uris: ['https://static.imply.io/data/wikipedia.json.gz'],
|
||||
},
|
||||
inputFormat: {
|
||||
type: 'json',
|
||||
},
|
||||
},
|
||||
inputFormat: {
|
||||
type: 'json',
|
||||
tuningConfig: {
|
||||
type: 'index_parallel',
|
||||
},
|
||||
},
|
||||
tuningConfig: {
|
||||
type: 'index_parallel',
|
||||
},
|
||||
dataSchema: {
|
||||
dataSource: 'wikipedia',
|
||||
granularitySpec: {
|
||||
type: 'uniform',
|
||||
segmentGranularity: 'DAY',
|
||||
queryGranularity: 'HOUR',
|
||||
dataSchema: {
|
||||
dataSource: 'wikipedia',
|
||||
granularitySpec: {
|
||||
type: 'uniform',
|
||||
segmentGranularity: 'DAY',
|
||||
queryGranularity: 'HOUR',
|
||||
},
|
||||
timestampSpec: {
|
||||
column: 'timestamp',
|
||||
format: 'iso',
|
||||
},
|
||||
dimensionsSpec: {},
|
||||
},
|
||||
timestampSpec: {
|
||||
column: 'timestamp',
|
||||
format: 'iso',
|
||||
},
|
||||
dimensionsSpec: {},
|
||||
},
|
||||
};
|
||||
|
||||
|
@ -219,44 +221,46 @@ describe('druid-type.ts', () => {
|
|||
updateSchemaWithSample(ingestionSpec, { header: ['header'], rows: [] }, 'specific', true),
|
||||
).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"dataSchema": Object {
|
||||
"dataSource": "wikipedia",
|
||||
"dimensionsSpec": Object {
|
||||
"dimensions": Array [
|
||||
"header",
|
||||
],
|
||||
},
|
||||
"granularitySpec": Object {
|
||||
"queryGranularity": "HOUR",
|
||||
"rollup": true,
|
||||
"segmentGranularity": "DAY",
|
||||
"type": "uniform",
|
||||
},
|
||||
"metricsSpec": Array [
|
||||
Object {
|
||||
"name": "count",
|
||||
"type": "count",
|
||||
"spec": Object {
|
||||
"dataSchema": Object {
|
||||
"dataSource": "wikipedia",
|
||||
"dimensionsSpec": Object {
|
||||
"dimensions": Array [
|
||||
"header",
|
||||
],
|
||||
},
|
||||
],
|
||||
"timestampSpec": Object {
|
||||
"column": "timestamp",
|
||||
"format": "iso",
|
||||
},
|
||||
},
|
||||
"ioConfig": Object {
|
||||
"inputFormat": Object {
|
||||
"type": "json",
|
||||
},
|
||||
"inputSource": Object {
|
||||
"type": "http",
|
||||
"uris": Array [
|
||||
"https://static.imply.io/data/wikipedia.json.gz",
|
||||
"granularitySpec": Object {
|
||||
"queryGranularity": "HOUR",
|
||||
"rollup": true,
|
||||
"segmentGranularity": "DAY",
|
||||
"type": "uniform",
|
||||
},
|
||||
"metricsSpec": Array [
|
||||
Object {
|
||||
"name": "count",
|
||||
"type": "count",
|
||||
},
|
||||
],
|
||||
"timestampSpec": Object {
|
||||
"column": "timestamp",
|
||||
"format": "iso",
|
||||
},
|
||||
},
|
||||
"ioConfig": Object {
|
||||
"inputFormat": Object {
|
||||
"type": "json",
|
||||
},
|
||||
"inputSource": Object {
|
||||
"type": "http",
|
||||
"uris": Array [
|
||||
"https://static.imply.io/data/wikipedia.json.gz",
|
||||
],
|
||||
},
|
||||
"type": "index_parallel",
|
||||
},
|
||||
"tuningConfig": Object {
|
||||
"type": "index_parallel",
|
||||
},
|
||||
"type": "index_parallel",
|
||||
},
|
||||
"tuningConfig": Object {
|
||||
"type": "index_parallel",
|
||||
},
|
||||
"type": "index_parallel",
|
||||
}
|
||||
|
|
|
@ -419,7 +419,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
isStepEnabled(step: Step): boolean {
|
||||
const { spec, cacheRows } = this.state;
|
||||
const druidSource = isDruidSource(spec);
|
||||
const ioConfig: IoConfig = deepGet(spec, 'ioConfig') || EMPTY_OBJECT;
|
||||
const ioConfig: IoConfig = deepGet(spec, 'spec.ioConfig') || EMPTY_OBJECT;
|
||||
|
||||
switch (step) {
|
||||
case 'connect':
|
||||
|
@ -453,7 +453,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
newSpec = upgradeSpec(newSpec);
|
||||
newSpec = adjustIngestionSpec(newSpec);
|
||||
const deltaState: Partial<LoadDataViewState> = { spec: newSpec, specPreview: newSpec };
|
||||
if (!deepGet(newSpec, 'ioConfig.type')) {
|
||||
if (!deepGet(newSpec, 'spec.ioConfig.type')) {
|
||||
deltaState.cacheRows = undefined;
|
||||
}
|
||||
this.setState(deltaState as LoadDataViewState);
|
||||
|
@ -961,7 +961,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
|
||||
async queryForConnect(initRun = false) {
|
||||
const { spec, sampleStrategy } = this.state;
|
||||
const ioConfig: IoConfig = deepGet(spec, 'ioConfig') || EMPTY_OBJECT;
|
||||
const ioConfig: IoConfig = deepGet(spec, 'spec.ioConfig') || EMPTY_OBJECT;
|
||||
|
||||
let issue: string | undefined;
|
||||
if (issueWithIoConfig(ioConfig, true)) {
|
||||
|
@ -1001,8 +1001,8 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
renderConnectStep() {
|
||||
const { specPreview: spec, inputQueryState, sampleStrategy } = this.state;
|
||||
const specType = getSpecType(spec);
|
||||
const ioConfig: IoConfig = deepGet(spec, 'ioConfig') || EMPTY_OBJECT;
|
||||
const inlineMode = deepGet(spec, 'ioConfig.inputSource.type') === 'inline';
|
||||
const ioConfig: IoConfig = deepGet(spec, 'spec.ioConfig') || EMPTY_OBJECT;
|
||||
const inlineMode = deepGet(spec, 'spec.ioConfig.inputSource.type') === 'inline';
|
||||
const druidSource = isDruidSource(spec);
|
||||
|
||||
let mainFill: JSX.Element | string = '';
|
||||
|
@ -1011,10 +1011,10 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
<TextArea
|
||||
className="inline-data"
|
||||
placeholder="Paste your data here"
|
||||
value={deepGet(spec, 'ioConfig.inputSource.data')}
|
||||
value={deepGet(spec, 'spec.ioConfig.inputSource.data')}
|
||||
onChange={(e: any) => {
|
||||
const stringValue = e.target.value.substr(0, MAX_INLINE_DATA_LENGTH);
|
||||
this.updateSpecPreview(deepSet(spec, 'ioConfig.inputSource.data', stringValue));
|
||||
this.updateSpecPreview(deepSet(spec, 'spec.ioConfig.inputSource.data', stringValue));
|
||||
}}
|
||||
/>
|
||||
);
|
||||
|
@ -1078,18 +1078,18 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
<AutoForm
|
||||
fields={getIoConfigFormFields(ingestionComboType)}
|
||||
model={ioConfig}
|
||||
onChange={c => this.updateSpecPreview(deepSet(spec, 'ioConfig', c))}
|
||||
onChange={c => this.updateSpecPreview(deepSet(spec, 'spec.ioConfig', c))}
|
||||
/>
|
||||
) : (
|
||||
<FormGroup label="IO Config">
|
||||
<JsonInput
|
||||
value={ioConfig}
|
||||
onChange={c => this.updateSpecPreview(deepSet(spec, 'ioConfig', c))}
|
||||
onChange={c => this.updateSpecPreview(deepSet(spec, 'spec.ioConfig', c))}
|
||||
height="300px"
|
||||
/>
|
||||
</FormGroup>
|
||||
)}
|
||||
{deepGet(spec, 'ioConfig.inputSource.type') === 'local' && (
|
||||
{deepGet(spec, 'spec.ioConfig.inputSource.type') === 'local' && (
|
||||
<FormGroup>
|
||||
<Callout intent={Intent.WARNING}>
|
||||
This path must be available on the local filesystem of all Druid services.
|
||||
|
@ -1117,19 +1117,23 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
const inputData = inputQueryState.data;
|
||||
|
||||
if (druidSource) {
|
||||
let newSpec = deepSet(spec, 'dataSchema.timestampSpec', {
|
||||
let newSpec = deepSet(spec, 'spec.dataSchema.timestampSpec', {
|
||||
column: '__time',
|
||||
format: 'iso',
|
||||
});
|
||||
|
||||
if (typeof inputData.rollup === 'boolean') {
|
||||
newSpec = deepSet(newSpec, 'dataSchema.granularitySpec.rollup', inputData.rollup);
|
||||
newSpec = deepSet(
|
||||
newSpec,
|
||||
'spec.dataSchema.granularitySpec.rollup',
|
||||
inputData.rollup,
|
||||
);
|
||||
}
|
||||
|
||||
if (inputData.queryGranularity) {
|
||||
newSpec = deepSet(
|
||||
newSpec,
|
||||
'dataSchema.granularitySpec.queryGranularity',
|
||||
'spec.dataSchema.granularitySpec.queryGranularity',
|
||||
inputData.queryGranularity,
|
||||
);
|
||||
}
|
||||
|
@ -1138,7 +1142,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
const aggregators = inputData.aggregators || {};
|
||||
newSpec = deepSet(
|
||||
newSpec,
|
||||
'dataSchema.dimensionsSpec.dimensions',
|
||||
'spec.dataSchema.dimensionsSpec.dimensions',
|
||||
Object.keys(inputData.columns)
|
||||
.filter(k => k !== '__time' && !aggregators[k])
|
||||
.map(k => ({
|
||||
|
@ -1151,7 +1155,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
if (inputData.aggregators) {
|
||||
newSpec = deepSet(
|
||||
newSpec,
|
||||
'dataSchema.metricsSpec',
|
||||
'spec.dataSchema.metricsSpec',
|
||||
Object.values(inputData.aggregators),
|
||||
);
|
||||
}
|
||||
|
@ -1179,9 +1183,9 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
|
||||
async queryForParser(initRun = false) {
|
||||
const { spec, sampleStrategy } = this.state;
|
||||
const ioConfig: IoConfig = deepGet(spec, 'ioConfig') || EMPTY_OBJECT;
|
||||
const ioConfig: IoConfig = deepGet(spec, 'spec.ioConfig') || EMPTY_OBJECT;
|
||||
const inputFormatColumns: string[] =
|
||||
deepGet(spec, 'ioConfig.inputFormat.columns') || EMPTY_ARRAY;
|
||||
deepGet(spec, 'spec.ioConfig.inputFormat.columns') || EMPTY_ARRAY;
|
||||
|
||||
let issue: string | undefined;
|
||||
if (issueWithIoConfig(ioConfig)) {
|
||||
|
@ -1225,9 +1229,9 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
parserQueryState,
|
||||
selectedFlattenField,
|
||||
} = this.state;
|
||||
const inputFormat: InputFormat = deepGet(spec, 'ioConfig.inputFormat') || EMPTY_OBJECT;
|
||||
const inputFormat: InputFormat = deepGet(spec, 'spec.ioConfig.inputFormat') || EMPTY_OBJECT;
|
||||
const flattenFields: FlattenField[] =
|
||||
deepGet(spec, 'ioConfig.inputFormat.flattenSpec.fields') || EMPTY_ARRAY;
|
||||
deepGet(spec, 'spec.ioConfig.inputFormat.flattenSpec.fields') || EMPTY_ARRAY;
|
||||
|
||||
const canFlatten = inputFormatCanFlatten(inputFormat);
|
||||
|
||||
|
@ -1312,7 +1316,9 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
<AutoForm
|
||||
fields={getInputFormatFormFields()}
|
||||
model={inputFormat}
|
||||
onChange={p => this.updateSpecPreview(deepSet(spec, 'ioConfig.inputFormat', p))}
|
||||
onChange={p =>
|
||||
this.updateSpecPreview(deepSet(spec, 'spec.ioConfig.inputFormat', p))
|
||||
}
|
||||
/>
|
||||
{this.renderApplyButtonBar()}
|
||||
</>
|
||||
|
@ -1325,7 +1331,11 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
text={`Auto add ${pluralIfNeeded(sugestedFlattenFields.length, 'flatten spec')}`}
|
||||
onClick={() => {
|
||||
this.updateSpec(
|
||||
deepSet(spec, 'ioConfig.inputFormat.flattenSpec.fields', sugestedFlattenFields),
|
||||
deepSet(
|
||||
spec,
|
||||
'spec.ioConfig.inputFormat.flattenSpec.fields',
|
||||
sugestedFlattenFields,
|
||||
),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
|
@ -1351,7 +1361,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
if (possibleTimestampSpec) {
|
||||
const newSpec: IngestionSpec = deepSet(
|
||||
spec,
|
||||
'dataSchema.timestampSpec',
|
||||
'spec.dataSchema.timestampSpec',
|
||||
possibleTimestampSpec,
|
||||
);
|
||||
this.updateSpec(newSpec);
|
||||
|
@ -1371,7 +1381,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
|
||||
renderFlattenControls(): JSX.Element | undefined {
|
||||
const { spec, selectedFlattenField, selectedFlattenFieldIndex } = this.state;
|
||||
const inputFormat: InputFormat = deepGet(spec, 'ioConfig.inputFormat') || EMPTY_OBJECT;
|
||||
const inputFormat: InputFormat = deepGet(spec, 'spec.ioConfig.inputFormat') || EMPTY_OBJECT;
|
||||
if (!inputFormatCanFlatten(inputFormat)) return;
|
||||
|
||||
const close = () => {
|
||||
|
@ -1397,7 +1407,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
this.updateSpec(
|
||||
deepSet(
|
||||
spec,
|
||||
`ioConfig.inputFormat.flattenSpec.fields.${selectedFlattenFieldIndex}`,
|
||||
`spec.ioConfig.inputFormat.flattenSpec.fields.${selectedFlattenFieldIndex}`,
|
||||
selectedFlattenField,
|
||||
),
|
||||
);
|
||||
|
@ -1414,7 +1424,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
this.updateSpec(
|
||||
deepDelete(
|
||||
spec,
|
||||
`ioConfig.inputFormat.flattenSpec.fields.${selectedFlattenFieldIndex}`,
|
||||
`spec.ioConfig.inputFormat.flattenSpec.fields.${selectedFlattenFieldIndex}`,
|
||||
),
|
||||
);
|
||||
close();
|
||||
|
@ -1452,8 +1462,8 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
async queryForTimestamp(initRun = false) {
|
||||
const { spec, cacheRows } = this.state;
|
||||
const inputFormatColumns: string[] =
|
||||
deepGet(spec, 'ioConfig.inputFormat.columns') || EMPTY_ARRAY;
|
||||
const timestampSpec = deepGet(spec, 'dataSchema.timestampSpec') || EMPTY_OBJECT;
|
||||
deepGet(spec, 'spec.ioConfig.inputFormat.columns') || EMPTY_ARRAY;
|
||||
const timestampSpec = deepGet(spec, 'spec.dataSchema.timestampSpec') || EMPTY_OBJECT;
|
||||
|
||||
if (!cacheRows) {
|
||||
this.setState({
|
||||
|
@ -1494,7 +1504,8 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
|
||||
renderTimestampStep() {
|
||||
const { specPreview: spec, columnFilter, specialColumnsOnly, timestampQueryState } = this.state;
|
||||
const timestampSpec: TimestampSpec = deepGet(spec, 'dataSchema.timestampSpec') || EMPTY_OBJECT;
|
||||
const timestampSpec: TimestampSpec =
|
||||
deepGet(spec, 'spec.dataSchema.timestampSpec') || EMPTY_OBJECT;
|
||||
const timestampSpecFromColumn = isColumnTimestampSpec(timestampSpec);
|
||||
|
||||
let mainFill: JSX.Element | string = '';
|
||||
|
@ -1563,7 +1574,9 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
column: 'timestamp',
|
||||
format: 'auto',
|
||||
};
|
||||
this.updateSpecPreview(deepSet(spec, 'dataSchema.timestampSpec', timestampSpec));
|
||||
this.updateSpecPreview(
|
||||
deepSet(spec, 'spec.dataSchema.timestampSpec', timestampSpec),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
<Button
|
||||
|
@ -1571,7 +1584,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
active={!timestampSpecFromColumn}
|
||||
onClick={() => {
|
||||
this.updateSpecPreview(
|
||||
deepSet(spec, 'dataSchema.timestampSpec', getConstantTimestampSpec()),
|
||||
deepSet(spec, 'spec.dataSchema.timestampSpec', getConstantTimestampSpec()),
|
||||
);
|
||||
}}
|
||||
/>
|
||||
|
@ -1581,7 +1594,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
fields={getTimestampSpecFormFields(timestampSpec)}
|
||||
model={timestampSpec}
|
||||
onChange={timestampSpec => {
|
||||
this.updateSpecPreview(deepSet(spec, 'dataSchema.timestampSpec', timestampSpec));
|
||||
this.updateSpecPreview(deepSet(spec, 'spec.dataSchema.timestampSpec', timestampSpec));
|
||||
}}
|
||||
/>
|
||||
{this.renderApplyButtonBar()}
|
||||
|
@ -1595,7 +1608,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
|
||||
private onTimestampColumnSelect = (newTimestampSpec: TimestampSpec) => {
|
||||
const { specPreview } = this.state;
|
||||
this.updateSpecPreview(deepSet(specPreview, 'dataSchema.timestampSpec', newTimestampSpec));
|
||||
this.updateSpecPreview(deepSet(specPreview, 'spec.dataSchema.timestampSpec', newTimestampSpec));
|
||||
};
|
||||
|
||||
// ==================================================================
|
||||
|
@ -1603,7 +1616,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
async queryForTransform(initRun = false) {
|
||||
const { spec, cacheRows } = this.state;
|
||||
const inputFormatColumns: string[] =
|
||||
deepGet(spec, 'ioConfig.inputFormat.columns') || EMPTY_ARRAY;
|
||||
deepGet(spec, 'spec.ioConfig.inputFormat.columns') || EMPTY_ARRAY;
|
||||
|
||||
if (!cacheRows) {
|
||||
this.setState({
|
||||
|
@ -1649,7 +1662,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
// selectedTransformIndex,
|
||||
} = this.state;
|
||||
const transforms: Transform[] =
|
||||
deepGet(spec, 'dataSchema.transformSpec.transforms') || EMPTY_ARRAY;
|
||||
deepGet(spec, 'spec.dataSchema.transformSpec.transforms') || EMPTY_ARRAY;
|
||||
|
||||
let mainFill: JSX.Element | string = '';
|
||||
if (transformQueryState.isInit()) {
|
||||
|
@ -1773,7 +1786,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
this.updateSpec(
|
||||
deepSet(
|
||||
spec,
|
||||
`dataSchema.transformSpec.transforms.${selectedTransformIndex}`,
|
||||
`spec.dataSchema.transformSpec.transforms.${selectedTransformIndex}`,
|
||||
selectedTransform,
|
||||
),
|
||||
);
|
||||
|
@ -1790,7 +1803,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
this.updateSpec(
|
||||
deepDelete(
|
||||
spec,
|
||||
`dataSchema.transformSpec.transforms.${selectedTransformIndex}`,
|
||||
`spec.dataSchema.transformSpec.transforms.${selectedTransformIndex}`,
|
||||
),
|
||||
);
|
||||
close();
|
||||
|
@ -1822,7 +1835,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
async queryForFilter(initRun = false) {
|
||||
const { spec, cacheRows } = this.state;
|
||||
const inputFormatColumns: string[] =
|
||||
deepGet(spec, 'ioConfig.inputFormat.columns') || EMPTY_ARRAY;
|
||||
deepGet(spec, 'spec.ioConfig.inputFormat.columns') || EMPTY_ARRAY;
|
||||
|
||||
if (!cacheRows) {
|
||||
this.setState({
|
||||
|
@ -1864,7 +1877,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
// The filters matched no data
|
||||
let sampleResponseNoFilter: SampleResponse;
|
||||
try {
|
||||
const specNoFilter = deepSet(spec, 'dataSchema.transformSpec.filter', null);
|
||||
const specNoFilter = deepSet(spec, 'spec.dataSchema.transformSpec.filter', null);
|
||||
sampleResponseNoFilter = await sampleForFilter(specNoFilter, cacheRows);
|
||||
} catch (e) {
|
||||
this.setState({
|
||||
|
@ -1889,7 +1902,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
}
|
||||
|
||||
private getMemoizedDimensionFiltersFromSpec = memoize(spec => {
|
||||
const { dimensionFilters } = splitFilter(deepGet(spec, 'dataSchema.transformSpec.filter'));
|
||||
const { dimensionFilters } = splitFilter(deepGet(spec, 'spec.dataSchema.transformSpec.filter'));
|
||||
return dimensionFilters;
|
||||
});
|
||||
|
||||
|
@ -1991,11 +2004,13 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
text="Apply"
|
||||
intent={Intent.PRIMARY}
|
||||
onClick={() => {
|
||||
const curFilter = splitFilter(deepGet(spec, 'dataSchema.transformSpec.filter'));
|
||||
const curFilter = splitFilter(
|
||||
deepGet(spec, 'spec.dataSchema.transformSpec.filter'),
|
||||
);
|
||||
const newFilter = joinFilter(
|
||||
deepSet(curFilter, `dimensionFilters.${selectedFilterIndex}`, selectedFilter),
|
||||
);
|
||||
this.updateSpec(deepSet(spec, 'dataSchema.transformSpec.filter', newFilter));
|
||||
this.updateSpec(deepSet(spec, 'spec.dataSchema.transformSpec.filter', newFilter));
|
||||
close();
|
||||
}}
|
||||
/>
|
||||
|
@ -2006,11 +2021,13 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
icon={IconNames.TRASH}
|
||||
intent={Intent.DANGER}
|
||||
onClick={() => {
|
||||
const curFilter = splitFilter(deepGet(spec, 'dataSchema.transformSpec.filter'));
|
||||
const curFilter = splitFilter(
|
||||
deepGet(spec, 'spec.dataSchema.transformSpec.filter'),
|
||||
);
|
||||
const newFilter = joinFilter(
|
||||
deepDelete(curFilter, `dimensionFilters.${selectedFilterIndex}`),
|
||||
);
|
||||
this.updateSpec(deepSet(spec, 'dataSchema.transformSpec.filter', newFilter));
|
||||
this.updateSpec(deepSet(spec, 'spec.dataSchema.transformSpec.filter', newFilter));
|
||||
close();
|
||||
}}
|
||||
/>
|
||||
|
@ -2037,8 +2054,8 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
|
||||
renderGlobalFilterControls() {
|
||||
const { spec, showGlobalFilter } = this.state;
|
||||
const intervals: string[] = deepGet(spec, 'dataSchema.granularitySpec.intervals');
|
||||
const { restFilter } = splitFilter(deepGet(spec, 'dataSchema.transformSpec.filter'));
|
||||
const intervals: string[] = deepGet(spec, 'spec.dataSchema.granularitySpec.intervals');
|
||||
const { restFilter } = splitFilter(deepGet(spec, 'spec.dataSchema.transformSpec.filter'));
|
||||
const hasGlobalFilter = Boolean(intervals || restFilter);
|
||||
|
||||
if (showGlobalFilter) {
|
||||
|
@ -2047,7 +2064,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
<AutoForm
|
||||
fields={[
|
||||
{
|
||||
name: 'dataSchema.granularitySpec.intervals',
|
||||
name: 'spec.dataSchema.granularitySpec.intervals',
|
||||
label: 'Time intervals',
|
||||
type: 'string-array',
|
||||
placeholder: 'ex: 2018-01-01/2018-06-01',
|
||||
|
@ -2066,9 +2083,11 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
<JsonInput
|
||||
value={restFilter}
|
||||
onChange={f => {
|
||||
const curFilter = splitFilter(deepGet(spec, 'dataSchema.transformSpec.filter'));
|
||||
const curFilter = splitFilter(
|
||||
deepGet(spec, 'spec.dataSchema.transformSpec.filter'),
|
||||
);
|
||||
const newFilter = joinFilter(deepSet(curFilter, `restFilter`, f));
|
||||
this.updateSpec(deepSet(spec, 'dataSchema.transformSpec.filter', newFilter));
|
||||
this.updateSpec(deepSet(spec, 'spec.dataSchema.transformSpec.filter', newFilter));
|
||||
}}
|
||||
height="200px"
|
||||
/>
|
||||
|
@ -2096,10 +2115,10 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
async queryForSchema(initRun = false) {
|
||||
const { spec, cacheRows } = this.state;
|
||||
const inputFormatColumns: string[] =
|
||||
deepGet(spec, 'ioConfig.inputFormat.columns') || EMPTY_ARRAY;
|
||||
const metricsSpec: MetricSpec[] = deepGet(spec, 'dataSchema.metricsSpec') || EMPTY_ARRAY;
|
||||
deepGet(spec, 'spec.ioConfig.inputFormat.columns') || EMPTY_ARRAY;
|
||||
const metricsSpec: MetricSpec[] = deepGet(spec, 'spec.dataSchema.metricsSpec') || EMPTY_ARRAY;
|
||||
const dimensionsSpec: DimensionsSpec =
|
||||
deepGet(spec, 'dataSchema.dimensionsSpec') || EMPTY_OBJECT;
|
||||
deepGet(spec, 'spec.dataSchema.dimensionsSpec') || EMPTY_OBJECT;
|
||||
|
||||
if (!cacheRows) {
|
||||
this.setState({
|
||||
|
@ -2149,7 +2168,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
selectedMetricSpec,
|
||||
selectedMetricSpecIndex,
|
||||
} = this.state;
|
||||
const rollup: boolean = Boolean(deepGet(spec, 'dataSchema.granularitySpec.rollup'));
|
||||
const rollup: boolean = Boolean(deepGet(spec, 'spec.dataSchema.granularitySpec.rollup'));
|
||||
const somethingSelected = Boolean(selectedDimensionSpec || selectedMetricSpec);
|
||||
const dimensionMode = getDimensionMode(spec);
|
||||
|
||||
|
@ -2240,7 +2259,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
<AutoForm
|
||||
fields={[
|
||||
{
|
||||
name: 'dataSchema.dimensionsSpec.dimensionExclusions',
|
||||
name: 'spec.dataSchema.dimensionsSpec.dimensionExclusions',
|
||||
label: 'Dimension exclusions',
|
||||
type: 'string-array',
|
||||
info: (
|
||||
|
@ -2297,7 +2316,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
<AutoForm
|
||||
fields={[
|
||||
{
|
||||
name: 'dataSchema.granularitySpec.queryGranularity',
|
||||
name: 'spec.dataSchema.granularitySpec.queryGranularity',
|
||||
label: 'Query granularity',
|
||||
type: 'string',
|
||||
suggestions: ['NONE', 'SECOND', 'MINUTE', 'HOUR', 'DAY'],
|
||||
|
@ -2418,19 +2437,24 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
};
|
||||
|
||||
if (selectedDimensionSpec) {
|
||||
const curDimensions = deepGet(spec, `dataSchema.dimensionsSpec.dimensions`) || EMPTY_ARRAY;
|
||||
const curDimensions =
|
||||
deepGet(spec, `spec.dataSchema.dimensionsSpec.dimensions`) || EMPTY_ARRAY;
|
||||
|
||||
const convertToMetric = (type: string, prefix: string) => {
|
||||
const specWithoutDimension = deepDelete(
|
||||
spec,
|
||||
`dataSchema.dimensionsSpec.dimensions.${selectedDimensionSpecIndex}`,
|
||||
`spec.dataSchema.dimensionsSpec.dimensions.${selectedDimensionSpecIndex}`,
|
||||
);
|
||||
|
||||
const specWithMetric = deepSet(specWithoutDimension, `dataSchema.metricsSpec.[append]`, {
|
||||
name: `${prefix}_${selectedDimensionSpec.name}`,
|
||||
type,
|
||||
fieldName: selectedDimensionSpec.name,
|
||||
});
|
||||
const specWithMetric = deepSet(
|
||||
specWithoutDimension,
|
||||
`spec.dataSchema.metricsSpec.[append]`,
|
||||
{
|
||||
name: `${prefix}_${selectedDimensionSpec.name}`,
|
||||
type,
|
||||
fieldName: selectedDimensionSpec.name,
|
||||
},
|
||||
);
|
||||
|
||||
this.updateSpec(specWithMetric);
|
||||
close();
|
||||
|
@ -2472,7 +2496,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
model={selectedDimensionSpec}
|
||||
onChange={selectedDimensionSpec => this.setState({ selectedDimensionSpec })}
|
||||
/>
|
||||
{selectedDimensionSpecIndex !== -1 && deepGet(spec, 'dataSchema.metricsSpec') && (
|
||||
{selectedDimensionSpecIndex !== -1 && deepGet(spec, 'spec.dataSchema.metricsSpec') && (
|
||||
<FormGroup>
|
||||
<Popover content={convertToMetricMenu}>
|
||||
<Button
|
||||
|
@ -2492,7 +2516,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
this.updateSpec(
|
||||
deepSet(
|
||||
spec,
|
||||
`dataSchema.dimensionsSpec.dimensions.${selectedDimensionSpecIndex}`,
|
||||
`spec.dataSchema.dimensionsSpec.dimensions.${selectedDimensionSpecIndex}`,
|
||||
selectedDimensionSpec,
|
||||
),
|
||||
);
|
||||
|
@ -2512,7 +2536,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
|
|||
this.updateSpec(
|
||||
deepDelete(
|
||||
spec,
|
||||
`dataSchema.dimensionsSpec.dimensions.${selectedDimensionSpecIndex}`,
|
||||
`spec.dataSchema.dimensionsSpec.dimensions.${selectedDimensionSpecIndex}`,
|
||||
),
|
||||
);
|
||||
close();
|
||||
|
@@ -2557,12 +2581,12 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     const convertToDimension = (type: string) => {
       const specWithoutMetric = deepDelete(
         spec,
-        `dataSchema.metricsSpec.${selectedMetricSpecIndex}`,
+        `spec.dataSchema.metricsSpec.${selectedMetricSpecIndex}`,
       );

       const specWithDimension = deepSet(
         specWithoutMetric,
-        `dataSchema.dimensionsSpec.dimensions.[append]`,
+        `spec.dataSchema.dimensionsSpec.dimensions.[append]`,
         {
           type,
           name: selectedMetricSpec.fieldName,
@@ -2614,7 +2638,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
                 this.updateSpec(
                   deepSet(
                     spec,
-                    `dataSchema.metricsSpec.${selectedMetricSpecIndex}`,
+                    `spec.dataSchema.metricsSpec.${selectedMetricSpecIndex}`,
                     selectedMetricSpec,
                   ),
                 );
@@ -2629,7 +2653,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
               intent={Intent.DANGER}
               onClick={() => {
                 this.updateSpec(
-                  deepDelete(spec, `dataSchema.metricsSpec.${selectedMetricSpecIndex}`),
+                  deepDelete(spec, `spec.dataSchema.metricsSpec.${selectedMetricSpecIndex}`),
                 );
                 close();
               }}
@@ -2663,9 +2687,9 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat

   renderPartitionStep() {
     const { spec } = this.state;
-    const tuningConfig: TuningConfig = deepGet(spec, 'tuningConfig') || EMPTY_OBJECT;
+    const tuningConfig: TuningConfig = deepGet(spec, 'spec.tuningConfig') || EMPTY_OBJECT;
     const granularitySpec: GranularitySpec =
-      deepGet(spec, 'dataSchema.granularitySpec') || EMPTY_OBJECT;
+      deepGet(spec, 'spec.dataSchema.granularitySpec') || EMPTY_OBJECT;

     return (
       <>
@@ -2696,16 +2720,16 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
               },
             ]}
             model={granularitySpec}
-            onChange={g => this.updateSpec(deepSet(spec, 'dataSchema.granularitySpec', g))}
+            onChange={g => this.updateSpec(deepSet(spec, 'spec.dataSchema.granularitySpec', g))}
           />
           <AutoForm
             fields={[
               {
-                name: 'dataSchema.granularitySpec.intervals',
+                name: 'spec.dataSchema.granularitySpec.intervals',
                 label: 'Time intervals',
                 type: 'string-array',
                 placeholder: 'ex: 2018-01-01/2018-06-01',
-                required: s => Boolean(deepGet(s, 'tuningConfig.forceGuaranteedRollup')),
+                required: spec => Boolean(deepGet(spec, 'spec.tuningConfig.forceGuaranteedRollup')),
                 info: (
                   <>
                     A comma separated list of intervals for the raw data being ingested. Ignored for
@@ -2723,7 +2747,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           <AutoForm
             fields={getPartitionRelatedTuningSpecFormFields(getSpecType(spec) || 'index_parallel')}
             model={tuningConfig}
-            onChange={t => this.updateSpec(deepSet(spec, 'tuningConfig', t))}
+            onChange={t => this.updateSpec(deepSet(spec, 'spec.tuningConfig', t))}
           />
         </div>
         <div className="control">
@@ -2748,8 +2772,8 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat

   renderTuningStep() {
     const { spec } = this.state;
-    const ioConfig: IoConfig = deepGet(spec, 'ioConfig') || EMPTY_OBJECT;
-    const tuningConfig: TuningConfig = deepGet(spec, 'tuningConfig') || EMPTY_OBJECT;
+    const ioConfig: IoConfig = deepGet(spec, 'spec.ioConfig') || EMPTY_OBJECT;
+    const tuningConfig: TuningConfig = deepGet(spec, 'spec.tuningConfig') || EMPTY_OBJECT;

     const ingestionComboType = getIngestionComboType(spec);
     const inputTuningFields = ingestionComboType
@@ -2764,7 +2788,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
             <AutoForm
               fields={inputTuningFields}
               model={ioConfig}
-              onChange={c => this.updateSpec(deepSet(spec, 'ioConfig', c))}
+              onChange={c => this.updateSpec(deepSet(spec, 'spec.ioConfig', c))}
             />
           ) : (
             <div>
@@ -2779,7 +2803,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           ) : (
             <JsonInput
               value={ioConfig}
-              onChange={c => this.updateSpec(deepSet(spec, 'ioConfig', c))}
+              onChange={c => this.updateSpec(deepSet(spec, 'spec.ioConfig', c))}
               height="300px"
             />
           )}
@@ -2789,7 +2813,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           <AutoForm
             fields={getTuningSpecFormFields()}
             model={tuningConfig}
-            onChange={t => this.updateSpec(deepSet(spec, 'tuningConfig', t))}
+            onChange={t => this.updateSpec(deepSet(spec, 'spec.tuningConfig', t))}
           />
         </div>
         <div className="control">
@@ -2812,7 +2836,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat

   renderPublishStep() {
     const { spec } = this.state;
-    const parallel = deepGet(spec, 'tuningConfig.maxNumConcurrentSubTasks') > 1;
+    const parallel = deepGet(spec, 'spec.tuningConfig.maxNumConcurrentSubTasks') > 1;

     return (
       <>
@@ -2821,17 +2845,17 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           <AutoForm
             fields={[
               {
-                name: 'dataSchema.dataSource',
+                name: 'spec.dataSchema.dataSource',
                 label: 'Datasource name',
                 type: 'string',
                 info: <>This is the name of the data source (table) in Druid.</>,
               },
               {
-                name: 'ioConfig.appendToExisting',
+                name: 'spec.ioConfig.appendToExisting',
                 label: 'Append to existing',
                 type: 'boolean',
                 defaultValue: false,
-                defined: spec => !deepGet(spec, 'tuningConfig.forceGuaranteedRollup'),
+                defined: spec => !deepGet(spec, 'spec.tuningConfig.forceGuaranteedRollup'),
                 info: (
                   <>
                     Creates segments as additional shards of the latest version, effectively
@@ -2849,7 +2873,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
           <AutoForm
             fields={[
              {
-                name: 'tuningConfig.logParseExceptions',
+                name: 'spec.tuningConfig.logParseExceptions',
                 label: 'Log parse exceptions',
                 type: 'boolean',
                 defaultValue: false,
@@ -2862,7 +2886,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
                 ),
               },
               {
-                name: 'tuningConfig.maxParseExceptions',
+                name: 'spec.tuningConfig.maxParseExceptions',
                 label: 'Max parse exceptions',
                 type: 'number',
                 disabled: parallel,
@@ -2875,7 +2899,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
                 ),
               },
               {
-                name: 'tuningConfig.maxSavedParseExceptions',
+                name: 'spec.tuningConfig.maxSavedParseExceptions',
                 label: 'Max saved parse exceptions',
                 type: 'number',
                 disabled: parallel,
@@ -2997,13 +3021,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
     if (isTask(spec)) {
       let taskResp: any;
       try {
-        taskResp = await axios.post('/druid/indexer/v1/task', {
-          type: spec.type,
-          spec,
-
-          // A hack to let context be set from the spec can be removed when https://github.com/apache/druid/issues/8662 is resolved
-          context: (spec as any).context,
-        });
+        taskResp = await axios.post('/druid/indexer/v1/task', spec);
      } catch (e) {
        AppToaster.show({
          message: `Failed to submit task: ${getDruidErrorMessage(e)}`,
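Note on the final hunk: after standardization, the object held in the wizard's state already has the shape of a native ingestion task ({ type, spec: { dataSchema, ioConfig, tuningConfig }, context }), so it can be POSTed to the Overlord unchanged instead of being re-wrapped. A minimal sketch of that submission follows, assuming an illustrative interface rather than the console's actual IngestionSpec type.

import axios from 'axios';

// Illustrative shape only; the console's real IngestionSpec type is richer.
interface IngestionSpec {
  type: string; // e.g. 'index_parallel'
  spec: {
    dataSchema: Record<string, any>;
    ioConfig: Record<string, any>;
    tuningConfig?: Record<string, any>;
  };
  context?: Record<string, any>; // context now travels inside the same document
}

async function submitTask(ingestionSpec: IngestionSpec): Promise<string> {
  // The standardized document is already a valid task payload, so it is
  // posted as-is; the old code rebuilt { type, spec, context } by hand here.
  const resp = await axios.post('/druid/indexer/v1/task', ingestionSpec);
  return resp.data.task; // the Overlord's response typically carries the task id here
}

Posting the whole document also removes the need for the context-passing workaround referenced in the deleted comment, since context rides along as a top-level field of the same object.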