Web console: show segment sizes in rows not bytes (#10496)

* added query error suggestions

* simplify the SQLs

* change segment size display to rows

* suggestion tests

* update snapshot

* make error detection more robust

* remove errant console log

* fix imports

* put suggestion on top

* better error rendering

* format as millions

* add .druid.pid to gitignore

* rename segment_size to segment_rows, fix visibility, fix divide by zero

* update snapshots
Vadim Ogievetsky 2020-10-13 13:19:39 -07:00 committed by GitHub
parent 567e381705
commit e8c5893c34
12 changed files with 287 additions and 78 deletions

View File

@ -16,3 +16,4 @@ lib/sql-docs.js
tscommand-*.tmp.txt
licenses.json
.druid.pid

View File

@ -49,12 +49,6 @@ As part of this repo:
- `script/` - Some helper bash scripts for running this console
- `src/` - This directory (together with `lib`) constitutes all the source code for this console
Generated/copied dynamically
- `index.html` - Entry file for the coordinator console
- `pages/` - The files for the older coordinator console
- `coordinator-console/` - Files for the coordinator console
## List of non SQL data reading APIs used
```

View File

@ -6,7 +6,7 @@
"license": "Apache-2.0",
"repository": {
"type": "git",
"url": "https://github.com/apache/druid/"
"url": "https://github.com/apache/druid"
},
"jest": {
"preset": "ts-jest",

View File

@ -16,10 +16,12 @@
* limitations under the License.
*/
import { sane } from 'druid-query-toolkit/build/test-utils';
import { DruidError } from './druid-query';
describe('DruidQuery', () => {
describe('DruidError', () => {
describe('DruidError.parsePosition', () => {
it('works for single error 1', () => {
const message = `Encountered "COUNT" at line 2, column 12. Was expecting one of: <EOF> "AS" ... "EXCEPT" ... "FETCH" ... "FROM" ... "INTERSECT" ... "LIMIT" ...`;
@ -52,4 +54,78 @@ describe('DruidQuery', () => {
});
});
});
describe('DruidError.getSuggestion', () => {
it('works for ==', () => {
const sql = sane`
SELECT *
FROM wikipedia -- test ==
WHERE channel == '#ar.wikipedia'
`;
const suggestion = DruidError.getSuggestion(`Encountered "= =" at line 3, column 15.`);
expect(suggestion!.label).toEqual(`Replace == with =`);
expect(suggestion!.fn(sql)).toEqual(sane`
SELECT *
FROM wikipedia -- test ==
WHERE channel = '#ar.wikipedia'
`);
});
it('works for == 2', () => {
const sql = sane`
SELECT
channel, COUNT(*) AS "Count"
FROM wikipedia
WHERE channel == 'de'
GROUP BY 1
ORDER BY 2 DESC
`;
const suggestion = DruidError.getSuggestion(
`Encountered "= =" at line 4, column 15. Was expecting one of: <EOF> "EXCEPT" ... "FETCH" ... "GROUP" ...`,
);
expect(suggestion!.label).toEqual(`Replace == with =`);
expect(suggestion!.fn(sql)).toEqual(sane`
SELECT
channel, COUNT(*) AS "Count"
FROM wikipedia
WHERE channel = 'de'
GROUP BY 1
ORDER BY 2 DESC
`);
});
it('works for incorrectly quoted literal', () => {
const sql = sane`
SELECT *
FROM wikipedia -- test "#ar.wikipedia"
WHERE channel = "#ar.wikipedia"
`;
const suggestion = DruidError.getSuggestion(
`org.apache.calcite.runtime.CalciteContextException: From line 3, column 17 to line 3, column 31: Column '#ar.wikipedia' not found in any table`,
);
expect(suggestion!.label).toEqual(`Replace "#ar.wikipedia" with '#ar.wikipedia'`);
expect(suggestion!.fn(sql)).toEqual(sane`
SELECT *
FROM wikipedia -- test "#ar.wikipedia"
WHERE channel = '#ar.wikipedia'
`);
});
it('removes comma (,) before FROM', () => {
const suggestion = DruidError.getSuggestion(
`Encountered "FROM" at line 1, column 14. Was expecting one of: "ABS" ...`,
);
expect(suggestion!.label).toEqual(`Remove , before FROM`);
expect(suggestion!.fn(`SELECT page, FROM wikipedia WHERE channel = '#ar.wikipedia'`)).toEqual(
`SELECT page FROM wikipedia WHERE channel = '#ar.wikipedia'`,
);
});
it('does nothing when there is nothing to do', () => {
const suggestion = DruidError.getSuggestion(
`Encountered "channel" at line 1, column 35. Was expecting one of: <EOF> "EXCEPT" ...`,
);
expect(suggestion).toBeUndefined();
});
});
});

View File

@ -31,6 +31,11 @@ export interface DruidErrorResponse {
host?: string;
}
export interface QuerySuggestion {
label: string;
fn: (query: string) => string | undefined;
}
export function parseHtmlError(htmlStr: string): string | undefined {
const startIndex = htmlStr.indexOf('</h3><pre>');
const endIndex = htmlStr.indexOf('\n\tat');
@ -92,12 +97,77 @@ export class DruidError extends Error {
return;
}
static positionToIndex(str: string, line: number, column: number): number {
const lines = str.split('\n').slice(0, line);
const lastLineIndex = lines.length - 1;
lines[lastLineIndex] = lines[lastLineIndex].slice(0, column - 1);
return lines.join('\n').length;
}
static getSuggestion(errorMessage: string): QuerySuggestion | undefined {
// == is used instead of =
// ex: Encountered "= =" at line 3, column 15. Was expecting one of
const matchEquals = errorMessage.match(/Encountered "= =" at line (\d+), column (\d+)./);
if (matchEquals) {
const line = Number(matchEquals[1]);
const column = Number(matchEquals[2]);
return {
label: `Replace == with =`,
fn: str => {
const index = DruidError.positionToIndex(str, line, column);
if (!str.slice(index).startsWith('==')) return;
return `${str.slice(0, index)}=${str.slice(index + 2)}`;
},
};
}
// Incorrect quoting on table
// ex: org.apache.calcite.runtime.CalciteContextException: From line 3, column 17 to line 3, column 31: Column '#ar.wikipedia' not found in any table
const matchQuotes = errorMessage.match(
/org.apache.calcite.runtime.CalciteContextException: From line (\d+), column (\d+) to line \d+, column \d+: Column '([^']+)' not found in any table/,
);
if (matchQuotes) {
const line = Number(matchQuotes[1]);
const column = Number(matchQuotes[2]);
const literalString = matchQuotes[3];
return {
label: `Replace "${literalString}" with '${literalString}'`,
fn: str => {
const index = DruidError.positionToIndex(str, line, column);
if (!str.slice(index).startsWith(`"${literalString}"`)) return;
return `${str.slice(0, index)}'${literalString}'${str.slice(
index + literalString.length + 2,
)}`;
},
};
}
// , before FROM
const matchComma = errorMessage.match(/Encountered "(FROM)" at/i);
if (matchComma) {
const fromKeyword = matchComma[1];
return {
label: `Remove , before ${fromKeyword}`,
fn: str => {
const newQuery = str.replace(/,(\s+FROM)/gim, '$1');
if (newQuery === str) return;
return newQuery;
},
};
}
return;
}
public canceled?: boolean;
public error?: string;
public errorMessage?: string;
public errorMessageWithoutExpectation?: string;
public expectation?: string;
public position?: RowColumn;
public errorClass?: string;
public host?: string;
public suggestion?: QuerySuggestion;
constructor(e: any) {
super(axios.isCancel(e) ? CANCELED_MESSAGE : getDruidErrorMessage(e));
@ -126,6 +196,15 @@ export class DruidError extends Error {
if (this.errorMessage) {
this.position = DruidError.parsePosition(this.errorMessage);
this.suggestion = DruidError.getSuggestion(this.errorMessage);
const expectationIndex = this.errorMessage.indexOf('Was expecting one of');
if (expectationIndex >= 0) {
this.errorMessageWithoutExpectation = this.errorMessage.slice(0, expectationIndex).trim();
this.expectation = this.errorMessage.slice(expectationIndex).trim();
} else {
this.errorMessageWithoutExpectation = this.errorMessage;
}
}
}
}
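For context, a minimal sketch of how the pieces above are meant to compose; the query string and error text are illustrative examples in the spirit of the spec cases earlier in this commit, not output from a real server:

```ts
import { DruidError } from './druid-query';

// Illustrative inputs (same shape as the spec cases above).
const queryString = `SELECT *
FROM wikipedia
WHERE channel == '#ar.wikipedia'`;
const errorMessage = `Encountered "= =" at line 3, column 15.`;

const suggestion = DruidError.getSuggestion(errorMessage);
if (suggestion) {
  // fn returns undefined if the query text no longer matches the reported position.
  const fixedQuery = suggestion.fn(queryString);
  if (fixedQuery) {
    console.log(suggestion.label); // "Replace == with ="
    console.log(fixedQuery);       // ... WHERE channel = '#ar.wikipedia'
  }
}
```

In the console UI (see the query-error component further down), clicking the rendered suggestion calls `onQueryStringChange(newQuery, true)`, which swaps in the corrected query and re-runs it.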

View File

@ -22,6 +22,7 @@ import {
formatBytesCompact,
formatInteger,
formatMegabytes,
formatMillions,
formatPercent,
sortWithPrefixSuffix,
sqlQueryCustomTableFilter,
@ -118,4 +119,13 @@ describe('general', () => {
expect(formatPercent(2 / 3)).toEqual('66.67%');
});
});
describe('formatMillions', () => {
it('works', () => {
expect(formatMillions(1e6)).toEqual('1.000 M');
expect(formatMillions(1e6 + 1)).toEqual('1.000 M');
expect(formatMillions(1234567)).toEqual('1.235 M');
expect(formatMillions(345.2)).toEqual('345');
});
});
});

View File

@ -235,6 +235,12 @@ export function formatPercent(n: number): string {
return (n * 100).toFixed(2) + '%';
}
export function formatMillions(n: number): string {
const s = (n / 1e6).toFixed(3);
if (s === '0.000') return String(Math.round(n));
return s + ' M';
}
function pad2(str: string | number): string {
return ('00' + str).substr(-2);
}
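As a quick check against the implementation above, the formatter only falls back to the rounded raw count when the three-decimal millions string would collapse to '0.000' (i.e. for values below roughly 500 rows):

```ts
formatMillions(1234567); // => '1.235 M'
formatMillions(600);     // => '0.001 M' (0.0006 rounds up at three decimals)
formatMillions(345.2);   // => '345'     (falls back to the rounded row count)
```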

View File

@ -184,14 +184,14 @@ exports[`data source view matches snapshot 1`] = `
Object {
"Cell": [Function],
"Header": <React.Fragment>
Segment size (MB)
Segment size (rows)
<br />
min / avg / max
minimum / average / maximum
</React.Fragment>,
"accessor": "avg_segment_size",
"accessor": "avg_segment_rows",
"filterable": false,
"show": true,
"width": 150,
"width": 220,
},
Object {
"Cell": [Function],

View File

@ -48,7 +48,7 @@ import {
formatBytes,
formatCompactionConfigAndStatus,
formatInteger,
formatMegabytes,
formatMillions,
formatPercent,
getDruidErrorMessage,
LocalStorageKeys,
@ -88,7 +88,6 @@ const tableColumns: Record<CapabilitiesMode, string[]> = {
'Availability',
'Segment load/drop queues',
'Total data size',
'Segment size',
'Compaction',
'% Compacted',
'Left to be compacted',
@ -120,7 +119,7 @@ function formatLoadDrop(segmentsToLoad: number, segmentsToDrop: number): string
}
const formatTotalDataSize = formatBytes;
const formatSegmentSize = formatMegabytes;
const formatSegmentRows = formatMillions;
const formatTotalRows = formatInteger;
const formatAvgRowSize = formatInteger;
const formatReplicatedSize = formatBytes;
@ -144,42 +143,41 @@ function progress(done: number, awaiting: number): number {
const PERCENT_BRACES = [formatPercent(1)];
interface Datasource {
datasource: string;
rules: Rule[];
compactionConfig?: CompactionConfig;
compactionStatus?: CompactionStatus;
[key: string]: any;
interface DatasourceQueryResultRow {
readonly datasource: string;
readonly num_segments: number;
readonly num_available_segments: number;
readonly num_segments_to_load: number;
readonly num_segments_to_drop: number;
readonly total_data_size: number;
readonly replicated_size: number;
readonly min_segment_rows: number;
readonly avg_segment_rows: number;
readonly max_segment_rows: number;
readonly total_rows: number;
readonly avg_row_size: number;
}
interface Datasource extends DatasourceQueryResultRow {
readonly rules: Rule[];
readonly compactionConfig?: CompactionConfig;
readonly compactionStatus?: CompactionStatus;
readonly unused?: boolean;
}
interface DatasourcesAndDefaultRules {
datasources: Datasource[];
defaultRules: Rule[];
}
interface DatasourceQueryResultRow {
datasource: string;
num_segments: number;
num_available_segments: number;
num_segments_to_load: number;
num_segments_to_drop: number;
total_data_size: number;
replicated_size: number;
min_segment_size: number;
avg_segment_size: number;
max_segment_size: number;
total_rows: number;
avg_row_size: number;
readonly datasources: Datasource[];
readonly defaultRules: Rule[];
}
interface RetentionDialogOpenOn {
datasource: string;
rules: Rule[];
readonly datasource: string;
readonly rules: Rule[];
}
interface CompactionDialogOpenOn {
datasource: string;
compactionConfig: CompactionConfig;
readonly datasource: string;
readonly compactionConfig: CompactionConfig;
}
export interface DatasourcesViewProps {
@ -229,19 +227,20 @@ export class DatasourcesView extends React.PureComponent<
COUNT(*) FILTER (WHERE is_available = 1 AND ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1)) AS num_available_segments,
COUNT(*) FILTER (WHERE is_published = 1 AND is_overshadowed = 0 AND is_available = 0) AS num_segments_to_load,
COUNT(*) FILTER (WHERE is_available = 1 AND NOT ((is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1)) AS num_segments_to_drop,
SUM("size") FILTER (WHERE (is_published = 1 AND is_overshadowed = 0)) AS total_data_size,
SUM("size" * "num_replicas") FILTER (WHERE (is_published = 1 AND is_overshadowed = 0)) AS replicated_size,
MIN("size") FILTER (WHERE (is_published = 1 AND is_overshadowed = 0)) AS min_segment_size,
(
SUM("size") FILTER (WHERE (is_published = 1 AND is_overshadowed = 0)) /
COUNT(*) FILTER (WHERE (is_published = 1 AND is_overshadowed = 0))
) AS avg_segment_size,
MAX("size") FILTER (WHERE (is_published = 1 AND is_overshadowed = 0)) AS max_segment_size,
SUM("size") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS total_data_size,
SUM("size" * "num_replicas") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS replicated_size,
MIN("num_rows") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS min_segment_rows,
AVG("num_rows") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS avg_segment_rows,
MAX("num_rows") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) AS max_segment_rows,
SUM("num_rows") FILTER (WHERE (is_published = 1 AND is_overshadowed = 0) OR is_realtime = 1) AS total_rows,
(
SUM("size") FILTER (WHERE (is_published = 1 AND is_overshadowed = 0)) /
SUM("num_rows") FILTER (WHERE (is_published = 1 AND is_overshadowed = 0))
) AS avg_row_size
CASE
WHEN SUM("num_rows") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) <> 0
THEN (
SUM("size") FILTER (WHERE is_published = 1 AND is_overshadowed = 0) /
SUM("num_rows") FILTER (WHERE is_published = 1 AND is_overshadowed = 0)
)
ELSE 0
END AS avg_row_size
FROM sys.segments
GROUP BY 1`;
@ -309,9 +308,9 @@ GROUP BY 1`;
num_segments_to_drop: 0,
replicated_size: -1,
total_data_size: totalDataSize,
min_segment_size: -1,
avg_segment_size: totalDataSize / numSegments,
max_segment_size: -1,
min_segment_rows: -1,
avg_segment_rows: -1,
max_segment_rows: -1,
total_rows: -1,
avg_row_size: -1,
};
@ -361,7 +360,7 @@ GROUP BY 1`;
const allDatasources = (datasources as any).concat(
unused.map(d => ({ datasource: d, unused: true })),
);
allDatasources.forEach((ds: Datasource) => {
allDatasources.forEach((ds: any) => {
ds.rules = rules[ds.datasource] || [];
ds.compactionConfig = compactionConfigs[ds.datasource];
ds.compactionStatus = compactionStatuses[ds.datasource];
@ -869,11 +868,11 @@ GROUP BY 1`;
const totalDataSizeValues = datasources.map(d => formatTotalDataSize(d.total_data_size));
const minSegmentSizeValues = datasources.map(d => formatSegmentSize(d.min_segment_size));
const minSegmentRowsValues = datasources.map(d => formatSegmentRows(d.min_segment_rows));
const avgSegmentSizeValues = datasources.map(d => formatSegmentSize(d.avg_segment_size));
const avgSegmentRowsValues = datasources.map(d => formatSegmentRows(d.avg_segment_rows));
const maxSegmentSizeValues = datasources.map(d => formatSegmentSize(d.max_segment_size));
const maxSegmentRowsValues = datasources.map(d => formatSegmentRows(d.max_segment_rows));
const totalRowsValues = datasources.map(d => formatTotalRows(d.total_rows));
@ -1011,23 +1010,23 @@ GROUP BY 1`;
),
},
{
Header: twoLines('Segment size (MB)', 'min / avg / max'),
show: hiddenColumns.exists('Segment size'),
accessor: 'avg_segment_size',
Header: twoLines('Segment size (rows)', 'minimum / average / maximum'),
show: capabilities.hasSql() && hiddenColumns.exists('Segment size'),
accessor: 'avg_segment_rows',
filterable: false,
width: 150,
width: 220,
Cell: ({ value, original }) => (
<>
<BracedText
text={formatSegmentSize(original.min_segment_size)}
braces={minSegmentSizeValues}
text={formatSegmentRows(original.min_segment_rows)}
braces={minSegmentRowsValues}
/>{' '}
&nbsp;{' '}
<BracedText text={formatSegmentSize(value)} braces={avgSegmentSizeValues} />{' '}
<BracedText text={formatSegmentRows(value)} braces={avgSegmentRowsValues} />{' '}
&nbsp;{' '}
<BracedText
text={formatSegmentSize(original.max_segment_size)}
braces={maxSegmentSizeValues}
text={formatSegmentRows(original.max_segment_rows)}
braces={maxSegmentRowsValues}
/>
</>
),
@ -1044,7 +1043,7 @@ GROUP BY 1`;
},
{
Header: twoLines('Avg. row size', '(bytes)'),
show: hiddenColumns.exists('Avg. row size'),
show: capabilities.hasSql() && hiddenColumns.exists('Avg. row size'),
accessor: 'avg_row_size',
filterable: false,
width: 100,

View File

@ -20,7 +20,9 @@
background: #232d35;
padding: 20px 22px;
.cursor-link {
.cursor-link,
.more-or-less,
.suggestion {
color: #2aabd2;
text-decoration: underline;
cursor: pointer;

View File

@ -16,7 +16,7 @@
* limitations under the License.
*/
import React from 'react';
import React, { useState } from 'react';
import { HighlightText } from '../../../components';
import { DruidError, RowColumn } from '../../../utils';
@ -26,24 +26,48 @@ import './query-error.scss';
export interface QueryErrorProps {
error: DruidError;
moveCursorTo: (rowColumn: RowColumn) => void;
queryString?: string;
onQueryStringChange?: (newQueryString: string, run?: boolean) => void;
}
export const QueryError = React.memo(function QueryError(props: QueryErrorProps) {
const { error, moveCursorTo } = props;
const { error, moveCursorTo, queryString, onQueryStringChange } = props;
const [showMore, setShowMore] = useState(false);
if (!error.errorMessage) {
return <div className="query-error">{error.message}</div>;
}
const { position } = error;
const { position, suggestion } = error;
let suggestionElement: JSX.Element | undefined;
if (suggestion && queryString && onQueryStringChange) {
const newQuery = suggestion.fn(queryString);
if (newQuery) {
suggestionElement = (
<p>
Suggestion:{' '}
<span
className="suggestion"
onClick={() => {
onQueryStringChange(newQuery, true);
}}
>
{suggestion.label}
</span>
</p>
);
}
}
return (
<div className="query-error">
{suggestionElement}
{error.error && <p>{`Error: ${error.error}`}</p>}
{error.errorMessage && (
{error.errorMessageWithoutExpectation && (
<p>
{position ? (
<HighlightText
text={error.errorMessage}
text={error.errorMessageWithoutExpectation}
find={position.match}
replace={
<span
@ -57,8 +81,24 @@ export const QueryError = React.memo(function QueryError(props: QueryErrorProps)
}
/>
) : (
error.errorMessage
error.errorMessageWithoutExpectation
)}
{error.expectation && !showMore && (
<>
{' '}
<span className="more-or-less" onClick={() => setShowMore(true)}>
More...
</span>
</>
)}
</p>
)}
{error.expectation && showMore && (
<p>
{error.expectation}{' '}
<span className="more-or-less" onClick={() => setShowMore(false)}>
Less...
</span>
</p>
)}
{error.errorClass && <p>{error.errorClass}</p>}

View File

@ -514,6 +514,8 @@ export class QueryView extends React.PureComponent<QueryViewProps, QueryViewStat
moveCursorTo={position => {
this.moveToPosition(position);
}}
queryString={queryString}
onQueryStringChange={this.handleQueryStringChange}
/>
)}
{queryResultState.loading && (