Migrate remaining compare tests to IT

Migrates the remaining comparison tests to integration tests
so they run easily in Gradle against real Elasticsearch nodes.

This breaks running them in the IDE without running an Elasticsearch
node and setting `-Drest.test.cluster=localhost:9200`. You can run
a compatible node with `cd sql-clients/jdbc && gradle run`.

Original commit: elastic/x-pack-elasticsearch@90a7c2dae7
Nik Everett 2017-06-29 10:15:49 -04:00
parent 2a272446a4
commit 9858d2ea3c
16 changed files with 50 additions and 896 deletions

View File

@@ -3,21 +3,22 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query.function.aggregate;
package org.elasticsearch.xpack.sql.jdbc.compare;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTestCase;
import java.nio.file.Path;
public class AggSpecTests extends CompareToH2BaseTestCase {
public AggSpecTests(String queryName, String query, Integer lineNumber, Path source) {
/**
* Tests for aggregations created by {@code GROUP BY}.
*/
public class AggIT extends CompareToH2BaseTestCase {
public AggIT(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@ParametersFactory
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/function/aggregate/agg.spec");
return readScriptSpec("agg");
}
}
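
For context, the spec files resolved by readScriptSpec pair a test-name line with a semicolon-terminated query, and lines starting with // are ignored; readScriptSpec("agg") resolves to the agg.spec file edited further down in this diff. For example (the entry below is quoted from agg.spec):

// comment lines are skipped by the parser
aggMaxWithAlias
SELECT gender g, MAX(emp_no) m FROM "emp.emp" GROUP BY g;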

View File

@@ -11,7 +11,6 @@ import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.sql.jdbc.JdbcIntegrationTestCase;
import org.elasticsearch.xpack.sql.jdbc.integration.util.EsDataLoader;
import java.io.IOException;
import java.net.URISyntaxException;
@@ -137,7 +136,7 @@ public abstract class CompareToH2BaseTestCase extends JdbcIntegrationTestCase {
createIndex.endObject().endObject();
client().performRequest("PUT", "/emp", emptyMap(), new StringEntity(createIndex.string(), ContentType.APPLICATION_JSON));
URL dataSet = EsDataLoader.class.getResource("/employees.csv");
URL dataSet = CompareToH2BaseTestCase.class.getResource("/employees.csv");
if (dataSet == null) {
throw new IllegalArgumentException("Can't find employees.csv");
}

View File

@@ -0,0 +1,24 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.compare;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import java.nio.file.Path;
/**
* Tests for functions related to dates and times.
*/
public class DateTimeIT extends CompareToH2BaseTestCase {
public DateTimeIT(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@ParametersFactory
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("datetime");
}
}

View File

@@ -9,6 +9,9 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import java.nio.file.Path;
/**
* Tests for filters added by {@code WHERE} clauses.
*/
public class FilterIT extends CompareToH2BaseTestCase {
public FilterIT(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);

View File

@@ -3,16 +3,19 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query;
package org.elasticsearch.xpack.sql.jdbc.compare;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import java.nio.file.Path;
public class DebugSpecTests extends CompareToH2BaseTestCase {
public DebugSpecTests(String queryName, String query, Integer lineNumber, Path source) {
public class MathIT extends CompareToH2BaseTestCase {
public MathIT(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
@ParametersFactory
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/debug.spec");
return readScriptSpec("math");
}
}

View File

@@ -9,6 +9,9 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import java.nio.file.Path;
/**
* Tests for basic {@code SELECT} statements without {@code WHERE} or {@code GROUP BY} or functions.
*/
public class SelectIT extends CompareToH2BaseTestCase {
public SelectIT(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);

View File

@@ -1,106 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.test.ESTestCase;
import org.junit.Assert;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.jdbc.compare.JdbcAssert.assertResultSets;
public abstract class CompareToH2BaseTestCase extends ESTestCase {
// NOCOMMIT subclasses should probably all be integration tests running against a running Elasticsearch
public final String queryName;
public final String query;
public final Integer lineNumber;
public final Path source;
public CompareToH2BaseTestCase(String queryName, String query, Integer lineNumber, Path source) {
this.queryName = queryName;
this.query = query;
this.lineNumber = lineNumber;
this.source = source;
}
protected static List<Object[]> readScriptSpec(String url) throws Exception {
Path source = PathUtils.get(CompareToH2BaseTestCase.class.getResource(url).toURI());
List<String> lines = Files.readAllLines(source);
Map<String, Integer> testNames = new LinkedHashMap<>();
List<Object[]> pairs = new ArrayList<>();
String name = null;
StringBuilder query = new StringBuilder();
for (int i = 0; i < lines.size(); i++) {
String line = lines.get(i).trim();
// ignore comments
if (!line.isEmpty() && !line.startsWith("//")) {
if (name == null) {
if (testNames.keySet().contains(line)) {
throw new IllegalStateException(format(Locale.ROOT, "Duplicate test name '%s' at line %d (previously seen at line %d)", line, i, testNames.get(line)));
}
else {
name = Strings.capitalize(line);
testNames.put(name, Integer.valueOf(i));
}
}
else {
if (line.endsWith(";")) {
query.append(line.substring(0, line.length() - 1));
}
pairs.add(new Object[] { name, query.toString(), Integer.valueOf(i), source });
name = null;
query.setLength(0);
}
}
}
Assert.assertNull("Cannot find query for test " + name, name);
return pairs;
}
public void testQuery() throws Throwable {
try (Connection h2 = QuerySuite.h2Con().get();
Connection es = QuerySuite.esCon().get()) {
ResultSet expected, actual;
try {
expected = h2.createStatement().executeQuery(query);
actual = es.createStatement().executeQuery(query);
assertResultSets(expected, actual);
} catch (AssertionError ae) {
throw reworkException(new AssertionError(errorMessage(ae), ae.getCause()));
}
} catch (Throwable th) {
throw reworkException(new RuntimeException(errorMessage(th)));
}
}
private String errorMessage(Throwable th) {
return format(Locale.ROOT, "test%s@%s:%d failed\n\"%s\"\n%s", queryName, source.getFileName().toString(), lineNumber, query, th.getMessage());
}
private Throwable reworkException(Throwable th) {
StackTraceElement[] stackTrace = th.getStackTrace();
StackTraceElement[] redone = new StackTraceElement[stackTrace.length + 1];
System.arraycopy(stackTrace, 0, redone, 1, stackTrace.length);
redone[0] = new StackTraceElement(getClass().getName(), queryName, source.getFileName().toString(), lineNumber);
th.setStackTrace(redone);
return th;
}
}

View File

@@ -1,92 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.function.Supplier;
import org.elasticsearch.common.CheckedSupplier;
import org.elasticsearch.xpack.sql.jdbc.JdbcTemplate;
import org.elasticsearch.xpack.sql.jdbc.compare.FilterIT;
import org.elasticsearch.xpack.sql.jdbc.compare.SelectIT;
import org.elasticsearch.xpack.sql.jdbc.integration.query.function.aggregate.AggSpecTests;
import org.elasticsearch.xpack.sql.jdbc.integration.query.function.scalar.datetime.DateTimeSpecTests;
import org.elasticsearch.xpack.sql.jdbc.integration.query.function.scalar.math.MathSpecTests;
import org.elasticsearch.xpack.sql.jdbc.integration.util.EsDataLoader;
import org.elasticsearch.xpack.sql.jdbc.integration.util.EsJdbcServer;
import org.elasticsearch.xpack.sql.jdbc.integration.util.H2;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
import static org.junit.Assert.assertNotNull;
@RunWith(Suite.class)
@SuiteClasses({ SelectIT.class, FilterIT.class, AggSpecTests.class, MathSpecTests.class, DateTimeSpecTests.class })
//@SuiteClasses({ DebugSpecTest.class })
//@SuiteClasses({ AggSpecTest.class })
//@SuiteClasses({ DateTimeSpecTest.class })
//@SuiteClasses({ MathSpecTest.class })
public class QuerySuite {
// NOCOMMIT we don't have suites in core so this is hard to figure out
//
// REMOTE ACCESS
//
private static boolean REMOTE = true;
private static String REMOTE_H2 = "jdbc:h2:tcp://localhost/./essql";
@ClassRule
public static H2 H2 = new H2(null);
@ClassRule
public static EsJdbcServer ES_JDBC_SERVER = new EsJdbcServer(REMOTE, true);
private static JdbcTemplate H2_JDBC, ES_JDBC;
@BeforeClass
public static void setupDB() throws Exception {
H2_JDBC = new JdbcTemplate(H2);
//ES_CON = new JdbcTemplate(ES_JDBC_SERVER);
setupH2();
if (!REMOTE) {
setupH2();
setupES();
}
}
private static void setupH2() throws Exception {
h2().execute("RUNSCRIPT FROM 'classpath:org/elasticsearch/sql/jdbc/integration/h2-setup.sql'");
}
private static void setupES() throws Exception {
EsDataLoader.loadData();
}
public static CheckedSupplier<Connection, SQLException> h2Con() {
return H2;
}
public static CheckedSupplier<Connection, SQLException> esCon() {
return ES_JDBC_SERVER;
}
public static JdbcTemplate h2() {
assertNotNull("H2 connection null - make sure the suite is ran", H2_JDBC);
return H2_JDBC;
}
public static JdbcTemplate es() {
assertNotNull("ES connection null - make sure the suite is ran", H2_JDBC);
return ES_JDBC;
}
}

View File

@@ -1,20 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query.function.scalar.datetime;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTestCase;
import java.nio.file.Path;
public class DateTimeSpecTests extends CompareToH2BaseTestCase {
public DateTimeSpecTests(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/function/scalar/datetime/datetime.spec");
}
}

View File

@@ -1,20 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.query.function.scalar.math;
import org.elasticsearch.xpack.sql.jdbc.integration.query.CompareToH2BaseTestCase;
import java.nio.file.Path;
public class MathSpecTests extends CompareToH2BaseTestCase {
public MathSpecTests(String queryName, String query, Integer lineNumber, Path source) {
super(queryName, query, lineNumber, source);
}
public static Iterable<Object[]> queries() throws Exception {
return readScriptSpec("/org/elasticsearch/sql/jdbc/integration/query/function/scalar/math/math.spec");
}
}

View File

@@ -1,571 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.util;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.junit.BeforeClass;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URL;
import java.nio.file.Files;
import java.util.List;
import java.util.stream.Stream;
import static java.util.stream.Collectors.toList;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.ESTestCase.between;
import static org.junit.Assert.fail;
// used rarely just to load the data (hence why it's marked as abstract)
// NOCOMMIT we should set this up to run with the tests if we need it
public abstract class EsDataLoader {
private static final Logger log = ESLoggerFactory.getLogger(EsDataLoader.class.getName());
private static Client client;
//@ClassRule
//public static LocalEs resource = new LocalEs();
private static void initClient() {
if (client == null) {
client = new PreBuiltTransportClient(Settings.EMPTY)
.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), 9300));
}
}
private static Client client() {
return client;
}
private static IndicesAdminClient indices() {
return client().admin().indices();
}
//
// based on: https://github.com/datacharmer/test_db
//
@BeforeClass
public static void loadData() throws Exception {
initClient();
loadEmployees();
// loadTitles();
// loadSalaries();
// loadDepartments();
// loadDepartmentsToEmployees();
//
// loadDepartmentsToEmployeesNested();
// loadEmployeesNested();
waitForIndices("emp");
}
public static void afterClass() throws Exception {
if (client != null) {
client.close();
client = null;
}
}
static void loadEmployees() throws Exception {
String index = "emp";
String type = "emp";
// create index
indices().create(new CreateIndexRequest(index)).get();
// add mapping
indices().preparePutMapping(index)
.setType(type)
.setSource(jsonBuilder()
.startObject()
.startObject(type)
.startObject("properties")
.startObject("emp_no").field("type", "integer").endObject()
.startObject("birth_date").field("type", "date").endObject()
// .startObject("first_name").field("type", "text").endObject()
// .startObject("last_name").field("type", "text").endObject()
// .startObject("gender").field("type", "keyword").endObject()
.startObject("hire_date").field("type", "date").endObject()
.endObject()
.endObject()
.endObject()
)
.get();
loadFromFile("/employees.csv", index, type, "emp_no", "birth_date", "first_name", "last_name", "gender", "hire_date");
}
private static void loadTitles() throws Exception {
String index = "title";
String type = "title";
// create index
indices().create(new CreateIndexRequest(index)).get();
// add mapping
indices().preparePutMapping(index)
.setType(type)
.setSource(jsonBuilder()
.startObject()
.startObject(type)
.startObject("properties")
.startObject("emp_no").field("type", "short").endObject()
// .startObject("title").field("type", "text").endObject()
.startObject("from_date").field("type", "date").endObject()
.startObject("to_date").field("type", "date").endObject()
.endObject()
.endObject()
.endObject()
)
.get();
// add data
loadFromFile("/titles.csv", index, type, "emp_no", "title", "from_date", "to_date");
}
private static void loadSalaries() throws Exception {
String index = "sal";
String type = "sal";
// create index
indices().create(new CreateIndexRequest(index)).get();
// add mapping
indices().preparePutMapping(index)
.setType(type)
.setSource(jsonBuilder()
.startObject()
.startObject(type)
.startObject("properties")
.startObject("emp_no").field("type", "short").endObject()
.startObject("salary").field("type", "integer").endObject()
.startObject("from_date").field("type", "date").endObject()
.startObject("to_date").field("type", "date").endObject()
.endObject()
.endObject()
.endObject()
)
.get();
// add data
loadFromFile("/salaries.csv", index, type, "emp_no", "salary", "from_date", "to_date");
}
private static void loadDepartments() throws Exception {
String index = "dep";
String type = "dep";
// create index
indices().create(new CreateIndexRequest(index)).get();
// add mapping
indices().preparePutMapping(index)
.setType(type)
.setSource(jsonBuilder()
.startObject()
.startObject(type)
.startObject("properties")
//.startObject("dept_no").field("type", "text").endObject()
//.startObject("dept_name").field("type", "text").endObject()
.endObject()
.endObject()
.endObject()
)
.get();
// add data
loadFromFile("/departments.csv", index, type, "dept_no", "dept_name");
}
private static void loadDepartmentsToEmployees() throws Exception {
String index = "deptoemp";
String type = "deptoemp";
// create index
indices().create(new CreateIndexRequest(index)).get();
// add mapping
indices().preparePutMapping(index)
.setType(type)
.setSource(jsonBuilder()
.startObject()
.startObject(type)
.startObject("properties")
.startObject("emp_no").field("type", "short").endObject()
//.startObject("dept_no").field("type", "text").endObject()
.startObject("from_date").field("type", "date").endObject()
.startObject("to_date").field("type", "date").endObject()
.endObject()
.endObject()
.endObject()
)
.get();
// add data
loadFromFile("/dep_emp.csv", index, type, "emp_no", "dept_no", "from_date", "to_date");
}
private static void loadDepartmentsToEmployeesNested() throws Exception {
String index = "nested_deptoemp";
String type = "nested_deptoemp";
// create index
indices().create(new CreateIndexRequest(index)).get();
// add mapping
indices().preparePutMapping(index)
.setType(type)
.setSource(jsonBuilder()
.startObject()
.startObject(type)
.startObject("properties")
// .startObject("dept_no").field("type", "text").endObject()
// .startObject("dept_name").field("type", "text").endObject()
.startObject("employee").field("type", "nested")
.startObject("properties")
.startObject("emp_no").field("type", "short").endObject()
.startObject("birth_date").field("type", "date").endObject()
// .startObject("first_name").field("type", "text").endObject()
// .startObject("last_name").field("type", "text").endObject()
.startObject("gender").field("type", "keyword").endObject()
.startObject("from_date").field("type", "date").endObject()
.startObject("to_date").field("type", "date").endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
)
.get();
log.info("About to parse and load the employee to department nested datasets");
// read the 3 files and do nested-loop joins in memory before sending the data out
List<String> deps = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/departments.csv").toURI()));
List<String> dep_emp = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/dep_emp.csv").toURI()));
List<String> emp = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/employees.csv").toURI()));
String[] dCols = { "dept_no", "dept_name" };
String[] empCol = { "emp_no", "birth_date", "first_name", "last_name", "gender", "hire_date", "from_date", "to_date" };
BulkRequestBuilder brb = client().prepareBulk();
deps.forEach(d -> {
try {
String[] dSplit = d.split(",");
// top-level = dep
XContentBuilder sourceBuilder = jsonBuilder().startObject();
for (int i = 0; i < dSplit.length; i++) {
sourceBuilder.field(dCols[i], dSplit[i]);
}
sourceBuilder.startArray("employee");
String match = "," + dSplit[0];
List<String[]> empKeys = dep_emp.stream()
.filter(l -> l.contains(match))
.map(m -> m.replace(match, ""))
.map(m -> m.split(","))
.collect(toList());
for (String[] empKey : empKeys) {
String m = empKey[0] + ",";
for (String e : emp) {
if (e.startsWith(m)) {
String[] empSplit = e.split(",");
sourceBuilder.startObject();
for (int i = 0; i < empSplit.length; i++) {
sourceBuilder.field(empCol[i], empSplit[i]);
}
sourceBuilder.field("from_date", empKey[1]);
sourceBuilder.field("to_date", empKey[2]);
sourceBuilder.endObject();
// found the match, move to the next item in the higher loop
break;
}
}
}
sourceBuilder.endArray();
sourceBuilder.endObject();
brb.add(client().prepareIndex(index, type).setSource(sourceBuilder));
} catch (IOException ex) {
throw new IllegalStateException(ex);
}
});
BulkResponse br = brb.get();
if (br.hasFailures()) {
fail(br.buildFailureMessage());
}
log.info("Dataset loaded in {}", br.getTook().format());
}
private static void loadEmployeesNested() throws Exception {
String index = "demo";
String type = "employees";
// create index
indices().create(new CreateIndexRequest(index)).get();
// add mapping
indices().preparePutMapping(index)
.setType(type)
.setSource(jsonBuilder()
.startObject()
.startObject(type)
.startObject("properties")
.startObject("emp_no").field("type", "integer").endObject()
.startObject("birth_date").field("type", "date").endObject()
.startObject("hire_date").field("type", "date").endObject()
.startObject("department").field("type", "nested")
.startObject("properties")
.startObject("from_date").field("type", "date").endObject()
.startObject("to_date").field("type", "date").endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
)
.get();
log.info("About to parse and load the department to employee nested datasets");
// read the 3 files and do nested-loop joins in memory before sending the data out
List<String> deps = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/departments.csv").toURI()));
List<String> dep_emp = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/dep_emp.csv").toURI()));
List<String> employees = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/employees.csv").toURI()));
String[] dCols = { "dept_no", "dept_name" };
String[] empCol = { "emp_no", "birth_date", "first_name", "last_name", "gender", "hire_date", "from_date", "to_date" };
BulkRequestBuilder brb = client().prepareBulk();
employees.forEach(emp -> {
try {
String[] eSplit = emp.split(",");
// top-level = emp
XContentBuilder sourceBuilder = jsonBuilder().startObject();
for (int i = 0; i < eSplit.length; i++) {
sourceBuilder.field(empCol[i], eSplit[i]);
}
sourceBuilder.startArray("department");
String match = eSplit[0] + ",";
List<String[]> depKeys = dep_emp.stream()
.filter(l -> l.contains(match))
.map(m -> m.replace(match, ""))
.map(m -> m.split(","))
.collect(toList());
for (String[] depKey : depKeys) {
String m = depKey[0] + ",";
for (String dep : deps) {
if (dep.startsWith(m)) {
String[] depSplit = dep.split(",");
sourceBuilder.startObject();
for (int i = 0; i < depSplit.length; i++) {
sourceBuilder.field(dCols[i], depSplit[i]);
}
sourceBuilder.field("from_date", depKey[1]);
sourceBuilder.field("to_date", depKey[2]);
sourceBuilder.endObject();
// found the match, move to the next item in the higher loop
break;
}
}
}
sourceBuilder.endArray();
sourceBuilder.endObject();
brb.add(client().prepareIndex(index, type).setSource(sourceBuilder));
} catch (IOException ex) {
throw new IllegalStateException(ex);
}
});
BulkResponse br = brb.get();
if (br.hasFailures()) {
fail(br.buildFailureMessage());
}
log.info("Dataset loaded in {}", br.getTook().format());
}
private static void loadEmployeesNotAnalyzedNested() throws Exception {
String index = "emp";
String type = "emp";
// create index
indices().create(new CreateIndexRequest(index)).get();
// add mapping
indices().preparePutMapping(index)
.setType(type)
.setSource(jsonBuilder()
.startObject()
.startObject(type)
.startObject("properties")
.startObject("emp_no").field("type", "integer").endObject()
.startObject("birth_date").field("type", "date").endObject()
// .startObject("first_name").field("type", "keyword").endObject()
// .startObject("last_name").field("type", "keyword").endObject()
// .startObject("gender").field("type", "keyword").endObject()
.startObject("tenure").field("type", "integer").endObject()
.startObject("salary").field("type", "integer").endObject()
.startObject("dep").field("type", "nested")
.startObject("properties")
.startObject("from_date").field("type", "date").endObject()
// .startObject("dept_name").field("type", "keyword").endObject()
.startObject("to_date").field("type", "date").endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
)
.get();
log.info("About to parse and load the department to employee nested datasets");
// read the 3 files and do nested-loop joins in memory before sending the data out
List<String> deps = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/departments.csv").toURI()));
List<String> dep_emp = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/dep_emp.csv").toURI()));
List<String> employees = Files.readAllLines(PathUtils.get(EsDataLoader.class.getResource("/employees.csv").toURI()));
String[] dCols = { "dept_no", "dept_name" };
String[] empCol = { "emp_no", "birth_date", "first_name", "last_name", "gender", "tenure", "from_date", "to_date" };
BulkRequestBuilder brb = client().prepareBulk();
employees.forEach(emp -> {
try {
String[] eSplit = emp.split(",");
// compute age from birth_date
Integer year = Integer.parseInt(eSplit[5].substring(0, 4));
eSplit[5] = String.valueOf(2017 - year);
// top-level = emp
XContentBuilder sourceBuilder = jsonBuilder().startObject();
for (int i = 0; i < eSplit.length; i++) {
sourceBuilder.field(empCol[i], eSplit[i]);
}
sourceBuilder.field("salary", between(38000, 106000));
sourceBuilder.startArray("dep");
String match = eSplit[0] + ",";
List<String[]> depKeys = dep_emp.stream()
.filter(l -> l.contains(match))
.map(m -> m.replace(match, ""))
.map(m -> m.split(","))
.collect(toList());
for (String[] depKey : depKeys) {
String m = depKey[0] + ",";
for (String dep : deps) {
if (dep.startsWith(m)) {
String[] depSplit = dep.split(",");
sourceBuilder.startObject();
for (int i = 0; i < depSplit.length; i++) {
sourceBuilder.field(dCols[i], depSplit[i]);
}
sourceBuilder.field("from_date", depKey[1]);
sourceBuilder.field("to_date", depKey[2]);
sourceBuilder.endObject();
// found the match, move to the next item in the higher loop
break;
}
}
}
sourceBuilder.endArray();
sourceBuilder.endObject();
brb.add(client().prepareIndex(index, type).setSource(sourceBuilder));
} catch (IOException ex) {
throw new IllegalStateException(ex);
}
});
BulkResponse br = brb.get();
if (br.hasFailures()) {
fail(br.buildFailureMessage());
}
log.info("Dataset loaded in {}", br.getTook().format());
}
private static void waitForIndices(String...indices) {
for (String index : indices) {
// force refresh
indices().prepareRefresh(index).get(TimeValue.timeValueSeconds(5));
// wait for index to fully start
client().admin().cluster().prepareHealth(index).setTimeout(TimeValue.timeValueSeconds(10)).setWaitForYellowStatus().get();
}
}
private static void loadFromFile(String resourceName, String index, String type, String... columns) throws Exception {
URL dataSet = EsDataLoader.class.getResource(resourceName);
log.info("About to parse and load dataset {}", dataSet);
BulkRequestBuilder brb = client().prepareBulk();
try (Stream<String> stream = Files.lines(PathUtils.get(dataSet.toURI()))) {
stream.forEach(s -> {
try {
XContentBuilder sourceBuilder = jsonBuilder().startObject();
String[] lineSplit = s.split(",");
for (int i = 0; i < lineSplit.length; i++) {
sourceBuilder.field(columns[i], lineSplit[i]);
}
sourceBuilder.endObject();
brb.add(client().prepareIndex(index, type).setSource(sourceBuilder));
} catch (IOException ex) {
throw new IllegalStateException(ex);
}
});
}
BulkResponse br = brb.get();
if (br.hasFailures()) {
fail(br.buildFailureMessage());
}
log.info("Dataset loaded in {}", br.getTook().format());
}
}
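
With this transport-client loader gone, the ITs load the same CSV data over the low-level REST client instead (see the performRequest call in CompareToH2BaseTestCase above). A rough sketch of that approach, assuming the 5.x RestClient signature used elsewhere in this diff; the class name, helper name, and bulk-body formatting here are illustrative, not the actual test code:

import static java.util.Collections.emptyMap;
import java.io.IOException;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.RestClient;

public class CsvRestLoaderSketch {
    static void loadCsv(RestClient client, String index, String type, String csv, String... columns) throws IOException {
        StringBuilder bulk = new StringBuilder();
        for (String line : csv.split("\n")) {
            String[] cells = line.split(",");
            bulk.append("{\"index\":{}}\n{");   // one action line per document
            for (int i = 0; i < cells.length; i++) {
                if (i > 0) {
                    bulk.append(',');
                }
                bulk.append('"').append(columns[i]).append("\":\"").append(cells[i]).append('"');
            }
            bulk.append("}\n");                 // bulk bodies are newline-terminated
        }
        client.performRequest("POST", "/" + index + "/" + type + "/_bulk", emptyMap(),
                new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
    }
}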

View File

@@ -1,60 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.jdbc.integration.util;
import org.elasticsearch.common.CheckedSupplier;
import org.junit.rules.ExternalResource;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
public class H2 extends ExternalResource implements CheckedSupplier<Connection, SQLException> {
static {
try {
Class.forName("org.h2.Driver");
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
private final String url;
private final Properties DEFAULT_PROPS = new Properties();
private Connection keepAlive;
public H2() {
this(null);
}
public H2(String db) {
this.url = (db == null ? "jdbc:h2:mem:essql" : db) + ";DATABASE_TO_UPPER=false;ALIAS_COLUMN_NAME=true";
if (db != null) {
DEFAULT_PROPS.setProperty("user", "sa");
}
}
@Override
protected void before() throws Throwable {
keepAlive = get();
}
@Override
protected void after() {
try {
keepAlive.close();
} catch (SQLException e) {
// ignore
}
keepAlive = null;
}
@Override
public Connection get() throws SQLException {
return DriverManager.getConnection(url, DEFAULT_PROPS);
}
}
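
A minimal sketch of how an ExternalResource rule like this was consumed; the test class and query below are hypothetical, for illustration only:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.junit.ClassRule;
import org.junit.Test;

public class H2UsageSketch {
    // Shared in-memory database; the rule's keep-alive connection stops
    // H2 from dropping jdbc:h2:mem:essql between individual tests.
    @ClassRule
    public static H2 h2 = new H2();

    @Test
    public void selectOne() throws SQLException {
        // get() hands out a fresh connection to the same database each time
        try (Connection con = h2.get();
             ResultSet rs = con.createStatement().executeQuery("SELECT 1")) {
            rs.next();
        }
    }
}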

View File

@@ -125,8 +125,6 @@ aggMaxAndCountWithFilter
SELECT gender g, MAX(emp_no) m, COUNT(1) c FROM "emp.emp" WHERE emp_no > 10000 GROUP BY gender;
aggMaxAndCountWithFilterAndLimit
SELECT gender g, MAX(emp_no) m, COUNT(1) c FROM "emp.emp" WHERE emp_no > 10000 GROUP BY gender LIMIT 1;
aggMinWithCastAndFilter
SELECT gender g, CAST(MAX(emp_no) AS SMALLINT) m, COUNT(1) c FROM "emp.emp" WHERE emp_no < 10020 GROUP BY gender;
aggMaxWithAlias
SELECT gender g, MAX(emp_no) m FROM "emp.emp" GROUP BY g;
@@ -161,11 +159,11 @@ aggSumWithHaving
SELECT gender g, CAST(SUM(emp_no) AS INT) s FROM "emp.emp" GROUP BY g HAVING SUM(emp_no) > 10;
aggSumWithHavingOnAlias
SELECT gender g, CAST(SUM(emp_no) AS INT) s FROM "emp.emp" GROUP BY g HAVING s > 10;
aggMaxWithMultipleHaving
aggSumWithMultipleHaving
SELECT gender g, CAST(SUM(emp_no) AS INT) s FROM "emp.emp" GROUP BY g HAVING s > 10 AND s < 10000000;
aggMaxWithMultipleHavingWithLimit
aggSumWithMultipleHavingWithLimit
SELECT gender g, CAST(SUM(emp_no) AS INT) s FROM "emp.emp" GROUP BY g HAVING s > 10 AND s < 10000000 LIMIT 1;
aggMaxWithMultipleHavingOnAliasAndFunction
aggSumWithMultipleHavingOnAliasAndFunction
SELECT gender g, CAST(SUM(emp_no) AS INT) s FROM "emp.emp" GROUP BY g HAVING s > 10 AND SUM(emp_no) > 10000000;
// AVG

View File

@@ -30,8 +30,6 @@ mathRadians
SELECT RADIANS(emp_no) m, first_name FROM "emp.emp" WHERE emp_no < 10010 ORDER BY emp_no;
mathRound
SELECT CAST(ROUND(emp_no) AS INT) m, first_name FROM "emp.emp" WHERE emp_no < 10010 ORDER BY emp_no;
mathRadians
SELECT RADIANS(emp_no) m, first_name FROM "emp.emp" WHERE emp_no < 10010 ORDER BY emp_no;
mathSin
SELECT SIN(emp_no) m, first_name FROM "emp.emp" WHERE emp_no < 10010 ORDER BY emp_no;
mathSinH
@@ -54,7 +52,7 @@ SELECT EXP(ABS(CEIL(SIN(DEGREES(emp_no))))) m, first_name FROM "emp.emp" WHERE E
//
// Filter by Scalar
//
//
mathAbsFilterAndOrder
SELECT emp_no, ABS(emp_no) m, first_name FROM "emp.emp" WHERE ABS(emp_no) < 10010 ORDER BY ABS(emp_no);
mathACosFilterAndOrder
@@ -72,6 +70,6 @@ SELECT emp_no, CEIL(emp_no) m, first_name FROM "emp.emp" WHERE CEIL(emp_no) < 10
//
// constants - added when folding will be supported
//
//
//mathConstantPI
//SELECT ABS(emp_no) m, PI(), first_name FROM "emp.emp" WHERE emp_no < 10010 ORDER BY emp_no;

View File

@@ -1,6 +0,0 @@
//
// Spec used for debugging a certain test (without having to alter the spec suite of which it might be part of)
//
debug
SELECT emp_no, ABS(emp_no) m, first_name FROM "emp.emp" WHERE ABS(emp_no) < 10010 ORDER BY ABS(emp_no);