Web console: add reindex (ingestSegment firehose) to the data loader (#8181)

* tidy up nulls

* standardize more on undefined

* updated licenses

* do not do heavy handed rendering

* reindex from druid

* tidy up

* add inline firehose

* add husky

* sass lint

* better suggestion

* fix script typo

* adjust time formats

* add missing time formats

* use term 'reindex'

* fix lodash.compact
Vadim Ogievetsky 2019-07-29 14:41:27 -07:00 committed by Fangjin Yang
parent ab71a2e1e4
commit cc4450db12
16 changed files with 410 additions and 102 deletions

View File

@ -2626,16 +2626,6 @@ license_file_path: licenses/bin/js-tokens.MIT
---
name: "lodash.compact"
license_category: binary
module: web-console
license_name: MIT License
copyright: John-David Dalton
version: 3.0.1
license_file_path: licenses/bin/lodash.compact.MIT
---
name: "lodash.debounce"
license_category: binary
module: web-console

View File

@ -1,23 +0,0 @@
The MIT License (MIT)
Copyright 2012-2016 The Dojo Foundation <http://dojofoundation.org/>
Based on Underscore.js, copyright 2009-2016 Jeremy Ashkenas,
DocumentCloud and Investigative Reporters & Editors <http://underscorejs.org/>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

[Two binary image files changed: one updated (15 KiB before and after), one added (4.6 KiB); previews not shown.]

View File

@ -1282,15 +1282,6 @@
"integrity": "sha512-0GJhzBdvsW2RUccNHOBkabI8HZVdOXmXbXhuKlDEd5Vv12P7oAVGfomGp3Ne21o5D/qu1WmthlNKFaoZJJeErA==",
"dev": true
},
"@types/lodash.compact": {
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/@types/lodash.compact/-/lodash.compact-3.0.6.tgz",
"integrity": "sha512-0pDKTX4alTyxH85Y5Al4YzS8oriqBQykADW6zLAHkZwNBMPXFIhdE2ctg0Z2GVcZsABxo5CI/J3vmHrFkdQBfA==",
"dev": true,
"requires": {
"@types/lodash": "*"
}
},
"@types/lodash.debounce": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/@types/lodash.debounce/-/lodash.debounce-4.0.6.tgz",
@ -1327,6 +1318,12 @@
"integrity": "sha512-gojym4tX0FWeV2gsW4Xmzo5wxGjXGm550oVUII7f7G5o4BV6c7DBdiG1RRQd+y1bvqRyYtPfMK85UM95vsapqQ==",
"dev": true
},
"@types/normalize-package-data": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz",
"integrity": "sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==",
"dev": true
},
"@types/numeral": {
"version": "0.0.25",
"resolved": "https://registry.npmjs.org/@types/numeral/-/numeral-0.0.25.tgz",
@ -6091,6 +6088,106 @@
"integrity": "sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=",
"dev": true
},
"husky": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/husky/-/husky-3.0.1.tgz",
"integrity": "sha512-PXBv+iGKw23GHUlgELRlVX9932feFL407/wHFwtsGeArp0dDM4u+/QusSQwPKxmNgjpSL+ustbOdQ2jetgAZbA==",
"dev": true,
"requires": {
"chalk": "^2.4.2",
"cosmiconfig": "^5.2.1",
"execa": "^1.0.0",
"get-stdin": "^7.0.0",
"is-ci": "^2.0.0",
"opencollective-postinstall": "^2.0.2",
"pkg-dir": "^4.2.0",
"please-upgrade-node": "^3.1.1",
"read-pkg": "^5.1.1",
"run-node": "^1.0.0",
"slash": "^3.0.0"
},
"dependencies": {
"find-up": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
"integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
"dev": true,
"requires": {
"locate-path": "^5.0.0",
"path-exists": "^4.0.0"
}
},
"get-stdin": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-7.0.0.tgz",
"integrity": "sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==",
"dev": true
},
"locate-path": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
"integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
"dev": true,
"requires": {
"p-locate": "^4.1.0"
}
},
"p-locate": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
"integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
"dev": true,
"requires": {
"p-limit": "^2.2.0"
}
},
"parse-json": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.0.0.tgz",
"integrity": "sha512-OOY5b7PAEFV0E2Fir1KOkxchnZNCdowAJgQ5NuxjpBKTRP3pQhwkrkxqQjeoKJ+fO7bCpmIZaogI4eZGDMEGOw==",
"dev": true,
"requires": {
"@babel/code-frame": "^7.0.0",
"error-ex": "^1.3.1",
"json-parse-better-errors": "^1.0.1",
"lines-and-columns": "^1.1.6"
}
},
"path-exists": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
"integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
"dev": true
},
"pkg-dir": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
"integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==",
"dev": true,
"requires": {
"find-up": "^4.0.0"
}
},
"read-pkg": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz",
"integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==",
"dev": true,
"requires": {
"@types/normalize-package-data": "^2.4.0",
"normalize-package-data": "^2.5.0",
"parse-json": "^5.0.0",
"type-fest": "^0.6.0"
}
},
"slash": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz",
"integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==",
"dev": true
}
}
},
"iconv-lite": {
"version": "0.4.24",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz",
@ -7487,11 +7584,6 @@
"integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=",
"dev": true
},
"lodash.compact": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/lodash.compact/-/lodash.compact-3.0.1.tgz",
"integrity": "sha1-VAzjg3dFl1gHRx4WtKK6IeclbKU="
},
"lodash.debounce": {
"version": "4.0.8",
"resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz",
@ -8730,6 +8822,12 @@
"wrappy": "1"
}
},
"opencollective-postinstall": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/opencollective-postinstall/-/opencollective-postinstall-2.0.2.tgz",
"integrity": "sha512-pVOEP16TrAO2/fjej1IdOyupJY8KDUM1CvsaScRbw6oddvpQoOfGk4ywha0HKKVAD6RkW4x6Q+tNBwhf3Bgpuw==",
"dev": true
},
"opener": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/opener/-/opener-1.5.1.tgz",
@ -9150,6 +9248,15 @@
"find-up": "^3.0.0"
}
},
"please-upgrade-node": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/please-upgrade-node/-/please-upgrade-node-3.1.1.tgz",
"integrity": "sha512-KY1uHnQ2NlQHqIJQpnh/i54rKkuxCEBx+voJIS/Mvb+L2iYd2NMotwduhKTMjfC1uKoX3VXOxLjIYG66dfJTVQ==",
"dev": true,
"requires": {
"semver-compare": "^1.0.0"
}
},
"pn": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/pn/-/pn-1.1.0.tgz",
@ -10788,6 +10895,12 @@
"integrity": "sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA==",
"dev": true
},
"run-node": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/run-node/-/run-node-1.0.0.tgz",
"integrity": "sha512-kc120TBlQ3mih1LSzdAJXo4xn/GWS2ec0l3S+syHDXP9uRr0JAT8Qd3mdMuyjqCzeZktgP3try92cEgf9Nks8A==",
"dev": true
},
"run-queue": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/run-queue/-/run-queue-1.0.3.tgz",
@ -11150,6 +11263,12 @@
"integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==",
"dev": true
},
"semver-compare": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz",
"integrity": "sha1-De4hahyUGrN+nvsXiPavxf9VN/w=",
"dev": true
},
"send": {
"version": "0.17.1",
"resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz",
@ -13089,6 +13208,12 @@
"prelude-ls": "~1.1.2"
}
},
"type-fest": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz",
"integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==",
"dev": true
},
"type-is": {
"version": "1.6.18",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",

View File

@ -24,6 +24,12 @@
"<rootDir>src/setup-tests.ts"
]
},
"husky": {
"hooks": {
"pre-commit": "npm run tslint-changed-only && npm run sasslint-changed-only",
"pre-push": "npm run tslint && npm run sasslint"
}
},
"prettier": {
"trailingComma": "all",
"tabWidth": 2,
@ -35,17 +41,20 @@
"scripts": {
"compile": "./script/build",
"pretest": "./script/build",
"test": "npm run tslint && npm run stylelint && jest --silent 2>&1",
"test": "npm run tslint && npm run sasslint && jest --silent 2>&1",
"coverage": "jest --coverage",
"update-snapshots": "jest -u",
"tslint": "./node_modules/.bin/tslint -c tslint.json --project tsconfig.json --formatters-dir ./node_modules/awesome-code-style/formatter 'src/**/*.ts?(x)'",
"tslint-fix": "npm run tslint -- --fix",
"tslint-changed-only": "git diff --diff-filter=ACMR --cached --name-only | grep -E \\.tsx\\?$ | xargs ./node_modules/.bin/tslint -c tslint.json --project tsconfig.json --formatters-dir ./node_modules/awesome-code-style/formatter",
"tslint-fix-changed-only": "npm run tslint-changed-only -- --fix",
"sasslint": "./node_modules/.bin/stylelint --config sasslint.json 'src/**/*.scss'",
"sasslint-fix": "npm run sasslint -- --fix",
"sasslint-changed-only": "git diff --diff-filter=ACMR --name-only | grep -E \\.scss$ | xargs ./node_modules/.bin/stylelint --config sasslint.json",
"sasslint-fix-changed-only": "npm run sasslint-changed-only -- --fix",
"generate-licenses-file": "license-checker --production --json --out licenses.json",
"check-licenses": "license-checker --production --onlyAllow 'Apache-1.1;Apache-2.0;BSD-2-Clause;BSD-3-Clause;MIT;CC0-1.0' --summary",
"start": "webpack-dev-server --hot --open",
"stylelint": "stylelint 'src/**/*.scss'"
"start": "webpack-dev-server --hot --open"
},
"dependencies": {
"@blueprintjs/core": "^3.17.1",
@ -60,7 +69,6 @@
"file-saver": "^2.0.2",
"has-own-prop": "^2.0.0",
"hjson": "^3.1.2",
"lodash.compact": "^3.0.1",
"lodash.debounce": "^4.0.8",
"lodash.escape": "^4.0.1",
"memoize-one": "^5.0.5",
@ -86,7 +94,6 @@
"@types/file-saver": "^2.0.1",
"@types/hjson": "^2.4.1",
"@types/jest": "^24.0.15",
"@types/lodash.compact": "^3.0.6",
"@types/lodash.debounce": "^4.0.6",
"@types/lodash.escape": "^4.0.6",
"@types/memoize-one": "^4.1.1",
@ -105,6 +112,7 @@
"enzyme-adapter-react-16": "^1.14.0",
"enzyme-to-json": "^3.3.5",
"fs-extra": "^8.1.0",
"husky": "^3.0.1",
"identity-obj-proxy": "^3.0.0",
"ignore-styles": "^5.0.1",
"jest": "^24.8.0",

View File

@ -0,0 +1,5 @@
{
"extends": "awesome-code-style/sasslint.json",
"rules": {
}
}

View File

@ -17,9 +17,10 @@
*/
import { TextArea } from '@blueprintjs/core';
import compact from 'lodash.compact';
import React from 'react';
import { compact } from '../../utils';
export interface ArrayInputProps {
className?: string;
values: string[];

View File

@ -205,6 +205,7 @@ export class AutoForm<T extends Record<string, any>> extends React.PureComponent
value={deepGet(model as any, field.name)}
onChange={(v: any) => this.fieldChange(field, v)}
updateInputValidity={updateInputValidity}
placeholder={field.placeholder}
/>
);
}

View File

@ -25,6 +25,7 @@ interface JSONInputProps {
onChange: (newJSONValue: any) => void;
value: any;
updateInputValidity?: (valueValid: boolean) => void;
placeholder?: string;
focus?: boolean;
width?: string;
height?: string;
@ -59,7 +60,7 @@ export class JSONInput extends React.PureComponent<JSONInputProps, JSONInputStat
}
render(): JSX.Element {
const { onChange, updateInputValidity, focus, width, height } = this.props;
const { onChange, updateInputValidity, placeholder, focus, width, height } = this.props;
const { stringValue } = this.state;
return (
<AceEditor
@ -79,6 +80,7 @@ export class JSONInput extends React.PureComponent<JSONInputProps, JSONInputStat
showPrintMargin={false}
showGutter={false}
value={stringValue}
placeholder={placeholder}
editorProps={{
$blockScrolling: Infinity,
}}

View File

@ -18,7 +18,8 @@
import axios from 'axios';
import { AxiosResponse } from 'axios';
import compact from 'lodash.compact';
import { assemble } from './general';
export function parseHtmlError(htmlStr: string): string | undefined {
const startIndex = htmlStr.indexOf('</h3><pre>');
@ -37,12 +38,12 @@ export function getDruidErrorMessage(e: any) {
switch (typeof data) {
case 'object':
return (
compact([
assemble(
data.error,
data.errorMessage,
data.errorClass,
data.host ? `on host ${data.host}` : undefined,
]).join(' / ') || e.message
).join(' / ') || e.message
);
case 'string':

View File

@ -18,9 +18,9 @@
import { jodaFormatToRegExp } from './joda-to-regexp';
export const BASIC_FORMAT_VALUES: string[] = ['iso', 'millis', 'posix'];
export const BASIC_TIME_FORMATS: string[] = ['iso', 'posix', 'millis', 'micro', 'nano'];
export const DATE_FORMAT_VALUES: string[] = [
export const DATE_ONLY_TIME_FORMATS: string[] = [
'dd/MM/yyyy',
'MM/dd/yyyy',
'd/M/yy',
@ -29,7 +29,7 @@ export const DATE_FORMAT_VALUES: string[] = [
'M/d/yyyy',
];
export const DATE_TIME_FORMAT_VALUES: string[] = [
export const DATETIME_TIME_FORMATS: string[] = [
'd/M/yyyy H:mm:ss',
'M/d/yyyy H:mm:ss',
'MM/dd/yyyy hh:mm:ss a',
@ -37,38 +37,45 @@ export const DATE_TIME_FORMAT_VALUES: string[] = [
'yyyy-MM-dd HH:mm:ss.S',
];
const ALL_FORMAT_VALUES: string[] = BASIC_FORMAT_VALUES.concat(
DATE_FORMAT_VALUES,
DATE_TIME_FORMAT_VALUES,
export const OTHER_TIME_FORMATS: string[] = ['MMM dd HH:mm:ss'];
const ALL_FORMAT_VALUES: string[] = BASIC_TIME_FORMATS.concat(
DATE_ONLY_TIME_FORMATS,
DATETIME_TIME_FORMATS,
OTHER_TIME_FORMATS,
);
const EXAMPLE_DATE_ISO = '2015-10-29T23:00:00.000Z';
const EXAMPLE_DATE_VALUE = Date.parse(EXAMPLE_DATE_ISO);
const MIN_MILLIS = 3.15576e11; // 10 years in millis, so Tue Jan 01 1980
const MAX_MILLIS = EXAMPLE_DATE_VALUE * 10;
const MIN_POSIX = MIN_MILLIS / 1000;
const MAX_POSIX = MAX_MILLIS / 1000;
const MIN_POSIX = 3.15576e8; // 10 years in posix seconds, so Tue Jan 01 1980
const MIN_MILLIS = MIN_POSIX * 1000;
const MIN_MICRO = MIN_MILLIS * 1000;
const MIN_NANO = MIN_MICRO * 1000;
const MAX_NANO = MIN_NANO * 1000;
// copied from http://goo.gl/0ejHHW with small tweak to make dddd not pass on its own
// tslint:disable-next-line:max-line-length
export const ISO_MATCHER = /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))(T((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)$/;
export function timeFormatMatches(format: string, value: string | number): boolean {
if (format === 'iso') {
return ISO_MATCHER.test(String(value));
}
const absValue = Math.abs(Number(value));
switch (format) {
case 'iso':
return ISO_MATCHER.test(String(value));
if (format === 'millis') {
const absValue = Math.abs(Number(value));
return MIN_MILLIS < absValue && absValue < MAX_MILLIS;
}
case 'posix':
return MIN_POSIX < absValue && absValue < MIN_MILLIS;
if (format === 'posix') {
const absValue = Math.abs(Number(value));
return MIN_POSIX < absValue && absValue < MAX_POSIX;
}
case 'millis':
return MIN_MILLIS < absValue && absValue < MIN_MICRO;
return jodaFormatToRegExp(format).test(String(value));
case 'micro':
return MIN_MICRO < absValue && absValue < MIN_NANO;
case 'nano':
return MIN_NANO < absValue && absValue < MAX_NANO;
default:
return jodaFormatToRegExp(format).test(String(value));
}
}
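As a quick sanity check of the new magnitude-based ranges, using the example ISO date above (a sketch; the expected results follow directly from the constants as defined):

timeFormatMatches('posix', 1446159600);            // true  — between MIN_POSIX and MIN_MILLIS
timeFormatMatches('millis', 1446159600000);        // true  — between MIN_MILLIS and MIN_MICRO
timeFormatMatches('micro', 1446159600000000);      // true  — between MIN_MICRO and MIN_NANO
timeFormatMatches('nano', '1446159600000000000');  // true  — between MIN_NANO and MAX_NANO
timeFormatMatches('millis', 1446159600);           // false — a value this small only matches 'posix'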
export function possibleDruidFormatForValues(values: any[]): string | null {

View File

@ -277,6 +277,14 @@ export function filterMap<T, Q>(xs: T[], f: (x: T, i: number) => Q | undefined):
return xs.map(f).filter((x: Q | undefined) => typeof x !== 'undefined') as Q[];
}
export function compact<T>(xs: (T | undefined | false | null | '')[]): T[] {
return xs.filter(Boolean) as T[];
}
export function assemble<T>(...xs: (T | undefined | false | null | '')[]): T[] {
return xs.filter(Boolean) as T[];
}
export function alphanumericCompare(a: string, b: string): number {
return String(a).localeCompare(b, undefined, { numeric: true });
}
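A small usage sketch of the two helpers; the joined form mirrors the getDruidErrorMessage call earlier in this diff (values are placeholders):

compact(['Query failed', undefined, '', 'on host localhost:8083']);
// => ['Query failed', 'on host localhost:8083']

assemble('Query failed', undefined, '', 'on host localhost:8083').join(' / ');
// => 'Query failed / on host localhost:8083'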

View File

@ -22,13 +22,20 @@ import React from 'react';
import { Field } from '../components/auto-form/auto-form';
import { ExternalLink } from '../components/external-link/external-link';
import { BASIC_FORMAT_VALUES, DATE_FORMAT_VALUES, DATE_TIME_FORMAT_VALUES } from './druid-time';
import {
BASIC_TIME_FORMATS,
DATE_ONLY_TIME_FORMATS,
DATETIME_TIME_FORMATS,
OTHER_TIME_FORMATS,
} from './druid-time';
import { deepGet, deepSet } from './object-change';
// These constants are used to make sure that they are not constantly recreated thrashing the pure components
export const EMPTY_OBJECT: any = {};
export const EMPTY_ARRAY: any[] = [];
const CURRENT_YEAR = new Date().getUTCFullYear();
export interface IngestionSpec {
type?: IngestionType;
dataSchema: DataSchema;
@ -48,6 +55,8 @@ export type IngestionComboType =
| 'kinesis'
| 'index:http'
| 'index:local'
| 'index:ingestSegment'
| 'index:inline'
| 'index:static-s3'
| 'index:static-google-blobstore';
@ -84,9 +93,11 @@ export function getIngestionComboType(spec: IngestionSpec): IngestionComboType |
switch (firehose.type) {
case 'local':
case 'http':
case 'ingestSegment':
case 'inline':
case 'static-s3':
case 'static-google-blobstore':
return `index:${firehose.type}` as any;
return `index:${firehose.type}` as IngestionComboType;
}
}
@ -101,6 +112,12 @@ export function getIngestionTitle(ingestionType: IngestionComboTypeWithExtra): s
case 'index:http':
return 'HTTP(s)';
case 'index:ingestSegment':
return 'Reindex from Druid';
case 'index:inline':
return 'Paste data';
case 'index:static-s3':
return 'Amazon S3';
@ -129,7 +146,7 @@ export function getIngestionTitle(ingestionType: IngestionComboTypeWithExtra): s
export function getIngestionImage(ingestionType: IngestionComboTypeWithExtra): string {
const parts = ingestionType.split(':');
if (parts.length === 2) return parts[1];
if (parts.length === 2) return parts[1].toLowerCase();
return ingestionType;
}
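With the lowercase tweak, the tile image name is derived directly from the combo type, for example:

getIngestionImage('index:ingestSegment'); // => 'ingestsegment'
getIngestionImage('index:inline');        // => 'inline'
getIngestionImage('kafka');               // => 'kafka'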
@ -369,14 +386,18 @@ const TIMESTAMP_SPEC_FORM_FIELDS: Field<TimestampSpec>[] = [
defaultValue: 'auto',
suggestions: [
'auto',
...BASIC_FORMAT_VALUES,
...BASIC_TIME_FORMATS,
{
group: 'Date and time formats',
suggestions: DATE_TIME_FORMAT_VALUES,
suggestions: DATETIME_TIME_FORMATS,
},
{
group: 'Date only formats',
suggestions: DATE_FORMAT_VALUES,
suggestions: DATE_ONLY_TIME_FORMATS,
},
{
group: 'Other time formats',
suggestions: OTHER_TIME_FORMATS,
},
],
isDefined: (timestampSpec: TimestampSpec) => isColumnTimestampSpec(timestampSpec),
@ -710,11 +731,21 @@ export interface IoConfig {
export interface Firehose {
type: string;
baseDir?: string;
filter?: string;
filter?: any;
uris?: string[];
prefixes?: string[];
blobs?: { bucket: string; path: string }[];
fetchTimeout?: number;
// ingestSegment
dataSource?: string;
interval?: string;
dimensions?: string[];
metrics?: string[];
maxInputSegmentBytesPerTask?: number;
// inline
data?: string;
}
export function getIoConfigFormFields(ingestionComboType: IngestionComboType): Field<IoConfig>[] {
@ -794,6 +825,84 @@ export function getIoConfigFormFields(ingestionComboType: IngestionComboType): F
},
];
case 'index:ingestSegment':
return [
firehoseType,
{
name: 'firehose.dataSource',
label: 'Datasource',
type: 'string',
info: <p>The datasource to fetch rows from.</p>,
},
{
name: 'firehose.interval',
label: 'Interval',
type: 'string',
placeholder: `${CURRENT_YEAR}-01-01/${CURRENT_YEAR + 1}-01-01`,
suggestions: [
`${CURRENT_YEAR}/${CURRENT_YEAR + 1}`,
`${CURRENT_YEAR}-01-01/${CURRENT_YEAR + 1}-01-01`,
`${CURRENT_YEAR}-01-01T00:00:00/${CURRENT_YEAR + 1}-01-01T00:00:00`,
],
info: (
<p>
A String representing ISO-8601 Interval. This defines the time range to fetch the data
over.
</p>
),
},
{
name: 'firehose.dimensions',
label: 'Dimensions',
type: 'string-array',
placeholder: '(optional)',
info: (
<p>
The list of dimensions to select. If left empty, no dimensions are returned. If left
null or not defined, all dimensions are returned.
</p>
),
},
{
name: 'firehose.metrics',
label: 'Metrics',
type: 'string-array',
placeholder: '(optional)',
info: (
<p>
The list of metrics to select. If left empty, no metrics are returned. If left null or
not defined, all metrics are selected.
</p>
),
},
{
name: 'firehose.filter',
label: 'Filter',
type: 'json',
placeholder: '(optional)',
info: (
<p>
The{' '}
<ExternalLink href="https://druid.apache.org/docs/latest/querying/filters.html">
filter
</ExternalLink>{' '}
to apply to the data as part of querying.
</p>
),
},
];
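Filled in, these fields produce an ioConfig along these lines (a sketch: the ioConfig type is assumed to be the native batch 'index'; dimensions, metrics, and filter are optional; values are placeholders):

{
  type: 'index',
  firehose: {
    type: 'ingestSegment',
    dataSource: 'wikipedia',
    interval: '2019-01-01/2020-01-01',
    dimensions: ['channel', 'page'],
    metrics: ['added'],
    filter: { type: 'selector', dimension: 'channel', value: '#en.wikipedia' },
  },
}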
case 'index:inline':
return [
firehoseType,
{
name: 'firehose.data',
label: 'Data',
type: 'string',
info: <p>The data to ingest.</p>,
},
];
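The inline counterpart simply carries the pasted text in its 'data' field (a sketch; newline-delimited JSON as an example):

{
  type: 'index',
  firehose: {
    type: 'inline',
    data: '{"time":"2015-09-12T00:46:58Z","channel":"#en.wikipedia","added":36}\n...',
  },
}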
case 'index:static-s3':
return [
firehoseType,
@ -962,8 +1071,8 @@ function issueWithFirehose(firehose: Firehose | undefined): string | undefined {
if (!firehose.type) return 'missing a type';
switch (firehose.type) {
case 'local':
if (!firehose.baseDir) return "must have a 'baseDir'";
if (!firehose.filter) return "must have a 'filter'";
if (!firehose.baseDir) return `must have a 'baseDir'`;
if (!firehose.filter) return `must have a 'filter'`;
break;
case 'http':
@ -972,6 +1081,15 @@ function issueWithFirehose(firehose: Firehose | undefined): string | undefined {
}
break;
case 'ingestSegment':
if (!firehose.dataSource) return `must have a 'dataSource'`;
if (!firehose.interval) return `must have an 'interval'`;
break;
case 'inline':
if (!firehose.data) return `must have 'data'`;
break;
case 'static-s3':
if (!nonEmptyArray(firehose.uris) && !nonEmptyArray(firehose.prefixes)) {
return 'must have at least one uri or prefix';
@ -1082,8 +1200,27 @@ export function getIoConfigTuningFormFields(
];
case 'index:local':
case 'index:inline':
return [];
case 'index:ingestSegment':
return [
{
name: 'firehose.maxInputSegmentBytesPerTask',
label: 'Max input segment bytes per task',
type: 'number',
defaultValue: 157286400,
info: (
<p>
When used with the native parallel index task, the maximum number of bytes of input
segments to process in a single task. If a single segment is larger than this number,
it will be processed by itself in a single task (input segments are never split across
tasks). Defaults to 150MB.
</p>
),
},
];
case 'kafka':
case 'kinesis':
return [
@ -1337,6 +1474,12 @@ export function guessDataSourceName(ioConfig: IoConfig): string | undefined {
case 'http':
return Array.isArray(firehose.uris) ? filenameFromPath(firehose.uris[0]) : undefined;
case 'ingestSegment':
return firehose.dataSource;
case 'inline':
return 'inline_data';
}
return;
@ -1821,10 +1964,19 @@ export function updateIngestionType(
}
export function fillParser(spec: IngestionSpec, sampleData: string[]): IngestionSpec {
if (deepGet(spec, 'ioConfig.firehose.type') === 'sql') {
const firehoseType = deepGet(spec, 'ioConfig.firehose.type');
if (firehoseType === 'sql') {
return deepSet(spec, 'dataSchema.parser', { type: 'map' });
}
if (firehoseType === 'ingestSegment') {
return deepSet(spec, 'dataSchema.parser', {
type: 'string',
parseSpec: { format: 'timeAndDims' },
});
}
const parseSpec = guessParseSpec(sampleData);
if (!parseSpec) return spec;

View File

@ -63,6 +63,7 @@
.main {
height: 100%;
padding: 0;
overflow: auto;
.bp3-card {
position: relative;

View File

@ -120,6 +120,7 @@ import {
getOverlordModules,
HeaderAndRows,
headerAndRowsFromSampleResponse,
SampleEntry,
sampleForConnect,
sampleForFilter,
sampleForParser,
@ -139,14 +140,19 @@ import { TransformTable } from './transform-table/transform-table';
import './load-data-view.scss';
function showRawLine(line: string): string {
if (line.includes('\n')) {
return `<Multi-line row, length: ${line.length}>`;
function showRawLine(line: SampleEntry): string {
const raw = line.raw;
if (raw.includes('\n')) {
return `[Multi-line row, length: ${raw.length}]`;
}
if (line.length > 1000) {
return line.substr(0, 1000) + '...';
if (raw.length > 1000) {
return raw.substr(0, 1000) + '...';
}
return line;
return raw;
}
function showBlankLine(line: SampleEntry): string {
return line.parsed ? `[Row: ${JSON.stringify(line.parsed)}]` : '[Binary data]';
}
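Assuming the SampleEntry shape implied by this usage ({ raw: string; parsed?: Record<string, any> }), the two helpers render sample rows roughly like so:

showRawLine({ raw: '{"time":"2015-09-12T00:46:58Z","added":36}' });
// => '{"time":"2015-09-12T00:46:58Z","added":36}'

showBlankLine({ raw: '', parsed: { time: '2015-09-12T00:46:58Z', added: 36 } });
// => '[Row: {"time":"2015-09-12T00:46:58Z","added":36}]'

showBlankLine({ raw: '' });
// => '[Binary data]'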
function getTimestampSpec(headerAndRows: HeaderAndRows | null): TimestampSpec {
@ -244,7 +250,7 @@ export interface LoadDataViewState {
specialColumnsOnly: boolean;
// for ioConfig
inputQueryState: QueryState<string[]>;
inputQueryState: QueryState<SampleEntry[]>;
// for parser
parserQueryState: QueryState<HeaderAndRows>;
@ -551,8 +557,10 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
{this.renderIngestionCard('index:static-s3')}
{this.renderIngestionCard('index:static-google-blobstore')}
{this.renderIngestionCard('hadoop')}
{this.renderIngestionCard('index:ingestSegment')}
{this.renderIngestionCard('index:http')}
{this.renderIngestionCard('index:local')}
{this.renderIngestionCard('index:inline')}
{/* this.renderIngestionCard('example') */}
{this.renderIngestionCard('other')}
</div>
@ -611,6 +619,24 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
</>
);
case 'index:ingestSegment':
return (
<>
<p>Reindex data from existing Druid segments.</p>
<p>
Reindexing data allows you to filter rows, add, transform, and delete columns, as well
as change the partitioning of the data.
</p>
</>
);
case 'index:inline':
return (
<>
<p>Ingest a small amount of data directly from the clipboard.</p>
</>
);
case 'index:static-s3':
return <p>Load text based data from Amazon S3.</p>;
@ -672,6 +698,8 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
switch (selectedComboType) {
case 'index:http':
case 'index:local':
case 'index:ingestSegment':
case 'index:inline':
case 'index:static-s3':
case 'index:static-google-blobstore':
case 'kafka':
@ -812,7 +840,7 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
this.setState({
cacheKey: sampleResponse.cacheKey,
inputQueryState: new QueryState({ data: sampleResponse.data.map((d: any) => d.raw) }),
inputQueryState: new QueryState({ data: sampleResponse.data }),
});
}
@ -841,8 +869,8 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
className="raw-lines"
value={
inputData.length
? (inputData.every(l => !l)
? inputData.map(_ => '<Binary data>')
? (inputData.every(l => !l.raw)
? inputData.map(showBlankLine)
: inputData.map(showRawLine)
).join('\n')
: 'No data returned from sampler'
@ -910,7 +938,9 @@ export class LoadDataView extends React.PureComponent<LoadDataViewProps, LoadDat
disabled: !inputQueryState.data,
onNextStep: () => {
if (!inputQueryState.data) return;
this.updateSpec(fillDataSourceName(fillParser(spec, inputQueryState.data)));
this.updateSpec(
fillDataSourceName(fillParser(spec, inputQueryState.data.map(l => l.raw))),
);
},
})}
</>