SQL: Allow CSV tests to look more like the CLI (elastic/x-pack-elasticsearch#3640)

This allows CSV tests to include a line between the header and the
values that is ignored by the test framework. This optional line can be
added to the tests to make them a little easier to read, which is useful
when they are included in the documentation. As a side effect, the tests
also closely mimic the output of the CLI, to the point where you can copy
directly from the CLI and paste into the CSV tests.

Example:
```
constantYear
// tag::year
SELECT YEAR(CAST('2018-01-19T10:23:27Z' AS TIMESTAMP)) as year;
     year
---------------
2018
// end::year
;
```

This can be extracted in the docs with a construct like this:
```
["source","sql",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{sql-specs}/datetime.csv-spec[year]
--------------------------------------------------
```

Which makes documentation that looks like this:
```
SELECT YEAR(CAST('2018-01-19T10:23:27Z' AS TIMESTAMP)) as year;
     year
---------------
2018
```

Which is fairly nice.

Original commit: elastic/x-pack-elasticsearch@8c10b5cb10

@@ -68,7 +68,7 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase {
         csvProperties.setProperty("charset", "UTF-8");
         csvProperties.setProperty("separator", "|");
         csvProperties.setProperty("trimValues", "true");
-        Tuple<String,String> resultsAndTypes = extractColumnTypes(expectedResults);
+        Tuple<String,String> resultsAndTypes = extractColumnTypesAndStripCli(expectedResults);
         csvProperties.setProperty("columnTypes", resultsAndTypes.v2());
         Reader reader = new StringReader(resultsAndTypes.v1());
         TableReader tableReader = new TableReader() {
@@ -103,26 +103,37 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase {
         return connectionProperties;
     }
 
-    private Tuple<String,String> extractColumnTypes(String expectedResults) throws IOException {
-        try (StringReader reader = new StringReader(expectedResults)){
-            try (BufferedReader bufferedReader = new BufferedReader(reader)){
-                String header = bufferedReader.readLine();
-                if (!header.contains(":")) {
-                    // No type information in headers, no need to parse columns - trigger auto-detection
-                    return new Tuple<>(expectedResults,"");
-                }
-                try (StringWriter writer = new StringWriter()) {
-                    try (BufferedWriter bufferedWriter = new BufferedWriter(writer)){
-                        Tuple<String, String> headerAndColumns = extractColumnTypesFromHeader(header);
-                        bufferedWriter.write(headerAndColumns.v1());
-                        bufferedWriter.newLine();
-                        bufferedWriter.flush();
-                        // Copy the rest of test
-                        Streams.copy(bufferedReader, bufferedWriter);
-                        return new Tuple<>(writer.toString(), headerAndColumns.v2());
-                    }
-                }
-            }
-        }
-    }
+    private Tuple<String,String> extractColumnTypesAndStripCli(String expectedResults) throws IOException {
+        try (StringReader reader = new StringReader(expectedResults);
+                BufferedReader bufferedReader = new BufferedReader(reader);
+                StringWriter writer = new StringWriter();
+                BufferedWriter bufferedWriter = new BufferedWriter(writer)) {
+            String header = bufferedReader.readLine();
+            Tuple<String, String> headerAndTypes;
+            if (header.contains(":")) {
+                headerAndTypes = extractColumnTypesFromHeader(header);
+            } else {
+                // No type information in headers, no need to parse columns - trigger auto-detection
+                headerAndTypes = new Tuple<>(header, "");
+            }
+            bufferedWriter.write(headerAndTypes.v1());
+            bufferedWriter.newLine();
+            /* Read the next line. It might be a separator designed to look like the cli.
+             * If it is, then throw it out. If it isn't then keep it.
+             */
+            String maybeSeparator = bufferedReader.readLine();
+            if (maybeSeparator != null && false == maybeSeparator.startsWith("----")) {
+                bufferedWriter.write(maybeSeparator);
+                bufferedWriter.newLine();
+            }
+            bufferedWriter.flush();
+            // Copy the rest of test
+            Streams.copy(bufferedReader, bufferedWriter);
+            return new Tuple<>(writer.toString(), headerAndTypes.v2());
+        }
+    }
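
For readers skimming the diff, here is a minimal, self-contained sketch of the separator-stripping idea on its own; the class name, `stripCliSeparator` helper, and `main` method are invented for illustration and are not part of this commit:
```
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class StripCliSeparatorSketch {
    // Drop an optional "----" separator line that may follow the header,
    // mirroring what extractColumnTypesAndStripCli does before CSV parsing.
    static String stripCliSeparator(String expectedResults) throws IOException {
        StringBuilder out = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new StringReader(expectedResults))) {
            String header = reader.readLine();
            if (header != null) {
                out.append(header).append('\n');
            }
            // The line after the header might be a CLI-style separator; if so, skip it.
            String maybeSeparator = reader.readLine();
            if (maybeSeparator != null && false == maybeSeparator.startsWith("----")) {
                out.append(maybeSeparator).append('\n');
            }
            String line;
            while ((line = reader.readLine()) != null) {
                out.append(line).append('\n');
            }
        }
        return out.toString();
    }

    public static void main(String[] args) throws IOException {
        String expected = "     year\n---------------\n2018\n";
        // Prints "     year" and "2018"; the separator row is gone.
        System.out.print(stripCliSeparator(expected));
    }
}
```
The real helper additionally pulls column type information out of the header and streams the rest of the test with Streams.copy, as shown in the diff above.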


@@ -167,8 +167,8 @@ d:i | c:l | s:i
 constantYear
 // tag::year
 SELECT YEAR(CAST('2018-01-19T10:23:27Z' AS TIMESTAMP)) as year;
-year
+     year
+---------------
 2018
 // end::year
 ;