SQL: Allow CSV tests to look more like the CLI (elastic/x-pack-elasticsearch#3640)
This allows CSV tests to include a line between the header and the values that is ignored by the test framework. This optional line can be added to tests to make them a little easier to read, which is useful when they are included in the documentation. As a side effect the tests also closely mimic the output of the CLI, to the point where you can copy directly from the CLI and paste into the CSV tests. Example:

```
constantYear
// tag::year
SELECT YEAR(CAST('2018-01-19T10:23:27Z' AS TIMESTAMP)) as year;

year
---------------
2018
// end::year
;
```

This can be extracted with a construct like this in the docs:

```
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/datetime.csv-spec[year]
--------------------------------------------------
```

Which makes documentation that looks like this:

```
SELECT YEAR(CAST('2018-01-19T10:23:27Z' AS TIMESTAMP)) as year;

year
---------------
2018
```

Which is fairly nice.

Original commit: elastic/x-pack-elasticsearch@8c10b5cb10
This commit is contained in:
parent b165f1c71e
commit 51a6285ba1
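The commit message above describes a separator line between the header and the values that the test framework simply throws away. As a rough, self-contained sketch of that idea (not the actual framework code, which follows in the diff below; the class and method names here are made up for illustration):

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class StripCliSeparatorSketch {
    // Keep the header line, drop the next line if it is a CLI-style "----" separator,
    // and keep everything else unchanged.
    static String stripCliSeparator(String expectedResults) throws IOException {
        StringBuilder kept = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new StringReader(expectedResults))) {
            String header = reader.readLine();
            kept.append(header).append('\n');

            // The line after the header may exist only to make the test look like CLI output.
            String maybeSeparator = reader.readLine();
            if (maybeSeparator != null && false == maybeSeparator.startsWith("----")) {
                kept.append(maybeSeparator).append('\n');
            }

            String line;
            while ((line = reader.readLine()) != null) {
                kept.append(line).append('\n');
            }
        }
        return kept.toString();
    }

    public static void main(String[] args) throws IOException {
        String cliLike = "year\n---------------\n2018\n";
        // Prints "year" and "2018": the dashed separator is ignored by the sketch,
        // just as the test framework ignores it in the csv-spec files.
        System.out.print(stripCliSeparator(cliLike));
    }
}
```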
@@ -68,7 +68,7 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase {
         csvProperties.setProperty("charset", "UTF-8");
         csvProperties.setProperty("separator", "|");
         csvProperties.setProperty("trimValues", "true");
-        Tuple<String,String> resultsAndTypes = extractColumnTypes(expectedResults);
+        Tuple<String,String> resultsAndTypes = extractColumnTypesAndStripCli(expectedResults);
         csvProperties.setProperty("columnTypes", resultsAndTypes.v2());
         Reader reader = new StringReader(resultsAndTypes.v1());
         TableReader tableReader = new TableReader() {
@@ -103,26 +103,37 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase {
         return connectionProperties;
     }
 
-    private Tuple<String,String> extractColumnTypes(String expectedResults) throws IOException {
-        try (StringReader reader = new StringReader(expectedResults)){
-            try (BufferedReader bufferedReader = new BufferedReader(reader)){
-                String header = bufferedReader.readLine();
-                if (!header.contains(":")) {
-                    // No type information in headers, no need to parse columns - trigger auto-detection
-                    return new Tuple<>(expectedResults,"");
-                }
-                try (StringWriter writer = new StringWriter()) {
-                    try (BufferedWriter bufferedWriter = new BufferedWriter(writer)){
-                        Tuple<String, String> headerAndColumns = extractColumnTypesFromHeader(header);
-                        bufferedWriter.write(headerAndColumns.v1());
-                        bufferedWriter.newLine();
-                        bufferedWriter.flush();
-                        // Copy the rest of test
-                        Streams.copy(bufferedReader, bufferedWriter);
-                        return new Tuple<>(writer.toString(), headerAndColumns.v2());
-                    }
-                }
-            }
-        }
-    }
+    private Tuple<String,String> extractColumnTypesAndStripCli(String expectedResults) throws IOException {
+        try (StringReader reader = new StringReader(expectedResults);
+                BufferedReader bufferedReader = new BufferedReader(reader);
+                StringWriter writer = new StringWriter();
+                BufferedWriter bufferedWriter = new BufferedWriter(writer)) {
+
+            String header = bufferedReader.readLine();
+            Tuple<String, String> headerAndTypes;
+
+            if (header.contains(":")) {
+                headerAndTypes = extractColumnTypesFromHeader(header);
+            } else {
+                // No type information in headers, no need to parse columns - trigger auto-detection
+                headerAndTypes = new Tuple<>(header, "");
+            }
+            bufferedWriter.write(headerAndTypes.v1());
+            bufferedWriter.newLine();
+
+            /* Read the next line. It might be a separator designed to look like the cli.
+             * If it is, then throw it out. If it isn't then keep it.
+             */
+            String maybeSeparator = bufferedReader.readLine();
+            if (maybeSeparator != null && false == maybeSeparator.startsWith("----")) {
+                bufferedWriter.write(maybeSeparator);
+                bufferedWriter.newLine();
+            }
+
+            bufferedWriter.flush();
+            // Copy the rest of test
+            Streams.copy(bufferedReader, bufferedWriter);
+            return new Tuple<>(writer.toString(), headerAndTypes.v2());
+        }
+    }
 
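The rewritten method keys its behavior off whether the header declares column types. A tiny hypothetical illustration of that check, reusing the typed header from this spec file (`d:i | c:l | s:i`) and the plain `year` header from the example below:

```java
public class HeaderTypeCheckSketch {
    public static void main(String[] args) {
        String typedHeader = "d:i | c:l | s:i"; // types declared per column -> parsed into columnTypes
        String plainHeader = "year";            // no types -> empty columnTypes, CSV auto-detection

        System.out.println(typedHeader.contains(":")); // true: parse the declared types
        System.out.println(plainHeader.contains(":")); // false: fall back to auto-detection
    }
}
```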
@@ -167,8 +167,8 @@ d:i | c:l | s:i
 constantYear
+// tag::year
 SELECT YEAR(CAST('2018-01-19T10:23:27Z' AS TIMESTAMP)) as year;
 
 year
 ---------------
 2018
+// end::year
 ;