The embedded SQL testing mode will conflict with moving the `sql:server`
project to `plugin:sql`, and we're fairly sure we need to rework or remove
it anyway.

relates elastic/x-pack-elasticsearch#3557

Original commit: elastic/x-pack-elasticsearch@763072c182
Nik Everett 2018-01-17 15:04:14 -05:00 committed by GitHub
parent 74ae8e3373
commit d6e292087b
8 changed files with 30 additions and 314 deletions

build.gradle

@@ -16,19 +16,6 @@ dependencies {
   // There are *no* CLI testing dependencies because we
   // communicate fork a new CLI process when we need it.
-  // Used to support embedded testing mode
-  compile(project(':x-pack-elasticsearch:sql:server')) {
-    transitive = false
-  }
-  compile(project(':x-pack-elasticsearch:sql:rest-proto')) {
-    transitive = false
-  }
-  compile "org.elasticsearch.client:transport:${version}"
-  // Needed by embedded server
-  compile project(path: ':modules:lang-painless', configuration: 'runtime')
 }
 
 /* disable unit tests because these are all integration tests used
@@ -37,23 +24,6 @@ test.enabled = false
 dependencyLicenses.enabled = false
 
-// Allow for com.sun.net.httpserver.* usage for embedded mode
-eclipse {
-  classpath.file {
-    whenMerged { cp ->
-      def con = entries.find { e ->
-        e.kind == "con" && e.toString().contains("org.eclipse.jdt.launching.JRE_CONTAINER")
-      }
-      con.accessRules.add(new org.gradle.plugins.ide.eclipse.model.AccessRule(
-          "accessible", "com/sun/net/httpserver/*"))
-    }
-  }
-}
-
-forbiddenApisMain {
-  bundledSignatures -= 'jdk-non-portable'
-  bundledSignatures += 'jdk-internal'
-}
 
 // the main files are actually test files, so use the appropriate forbidden api sigs
 forbiddenApisMain {
   signaturesURLs = [PrecommitTasks.getResource('/forbidden/es-all-signatures.txt'),
@@ -94,13 +64,9 @@ subprojects {
    }
    testCompile "org.elasticsearch.test:framework:${versions.elasticsearch}"
-   // Needed by embedded server
-   testCompile project(path: ':modules:lang-painless', configuration: 'runtime')
 
    // JDBC testing dependencies
    testRuntime(project(':x-pack-elasticsearch:sql:jdbc'))
    // TODO: Restore shading when https://github.com/elastic/elasticsearch/pull/27955 gets in
    testRuntime("net.sourceforge.csvjdbc:csvjdbc:1.0.34") {
      transitive = false
    }
@@ -112,12 +78,6 @@ subprojects {
    }
    cliFixture project(':x-pack-elasticsearch:test:sql-cli-fixture')
-   // Used to support embedded testing mode
-   testRuntime(project(':x-pack-elasticsearch:sql:server')) {
-     transitive = false
-   }
-   testRuntime "org.elasticsearch.client:transport:${version}"
  }
 
  if (project.name != 'security') {

EmbeddedJdbcServer.java (deleted)

@@ -1,118 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.qa.sql.embed;
-
-import org.apache.http.HttpHost;
-import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
-import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
-import org.elasticsearch.client.RestClient;
-import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.network.NetworkModule;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.TransportAddress;
-import org.elasticsearch.painless.PainlessPlugin;
-import org.elasticsearch.test.InternalTestCluster;
-import org.elasticsearch.test.NodeConfigurationSource;
-import org.elasticsearch.transport.Netty4Plugin;
-import org.elasticsearch.xpack.qa.sql.jdbc.DataLoader;
-import org.junit.rules.ExternalResource;
-
-import java.io.IOException;
-import java.nio.file.Path;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.SQLException;
-import java.util.Arrays;
-import java.util.Properties;
-import java.util.function.Function;
-
-import static org.apache.lucene.util.LuceneTestCase.createTempDir;
-import static org.apache.lucene.util.LuceneTestCase.random;
-import static org.elasticsearch.test.ESTestCase.randomLong;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.fail;
-
-/**
- * Embedded JDBC server that uses the internal test cluster in the same JVM as the tests.
- */
-public class EmbeddedJdbcServer extends ExternalResource {
-
-    private InternalTestCluster internalTestCluster;
-    private String jdbcUrl;
-    private final Properties properties;
-
-    public EmbeddedJdbcServer() {
-        this(false);
-    }
-
-    public EmbeddedJdbcServer(boolean debug) {
-        properties = new Properties();
-        if (debug) {
-            properties.setProperty("debug", "true");
-        }
-    }
-
-    @Override
-    @SuppressWarnings("resource")
-    protected void before() throws Throwable {
-        int numNodes = 1;
-        internalTestCluster = new InternalTestCluster(randomLong(), createTempDir(), false, true, numNodes, numNodes,
-                "sql_embed", new SqlNodeConfigurationSource(), 0, false, "sql_embed",
-                Arrays.asList(Netty4Plugin.class, SqlEmbedPlugin.class, PainlessPlugin.class),
-                Function.identity());
-        internalTestCluster.beforeTest(random(), 0.5);
-
-        Tuple<String, Integer> address = getHttpAddress();
-        jdbcUrl = "jdbc:es://" + address.v1() + ":" + address.v2();
-        System.setProperty("tests.rest.cluster", address.v1() + ":" + address.v2());
-    }
-
-    private Tuple<String, Integer> getHttpAddress() {
-        NodesInfoResponse nodesInfoResponse = internalTestCluster.client().admin().cluster().prepareNodesInfo().get();
-        assertFalse(nodesInfoResponse.hasFailures());
-        for (NodeInfo node : nodesInfoResponse.getNodes()) {
-            if (node.getHttp() != null) {
-                TransportAddress publishAddress = node.getHttp().address().publishAddress();
-                return new Tuple<>(publishAddress.getAddress(), publishAddress.getPort());
-            }
-        }
-        throw new IllegalStateException("No http servers found");
-    }
-
-    @Override
-    protected void after() {
-        try {
-            internalTestCluster.afterTest();
-        } catch (IOException e) {
-            fail("Failed to shutdown server " + e.getMessage());
-        } finally {
-            internalTestCluster.close();
-        }
-    }
-
-    public Connection connection(Properties props) throws SQLException {
-        assertNotNull("ES JDBC Server is null - make sure ES is properly run as a @ClassRule", jdbcUrl);
-        Properties p = new Properties(properties);
-        p.putAll(props);
-        return DriverManager.getConnection(jdbcUrl, p);
-    }
-
-    private static class SqlNodeConfigurationSource extends NodeConfigurationSource {
-        @Override
-        public Settings nodeSettings(int nodeOrdinal) {
-            return Settings.builder()
-                    .put(NetworkModule.HTTP_ENABLED.getKey(), true) // This test requires HTTP
-                    .build();
-        }
-
-        @Override
-        public Path nodeConfigPath(int nodeOrdinal) {
-            return null;
-        }
-    }
-}
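
For context, this is roughly how a test suite wired the embedded server before this commit. It is a minimal sketch based on the removed class above and on the EMBED_SQL wiring removed from JdbcIntegrationTestCase further down; the test class name and the assertion are illustrative only, not part of the original code:

    import java.sql.Connection;
    import java.util.Properties;

    import org.elasticsearch.xpack.qa.sql.embed.EmbeddedJdbcServer;
    import org.junit.ClassRule;
    import org.junit.Test;

    import static org.junit.Assert.assertFalse;

    public class ExampleEmbeddedJdbcIT {
        // Started once per class; before() boots an in-JVM InternalTestCluster and
        // derives a jdbc:es://host:port URL from the node's HTTP publish address.
        @ClassRule
        public static final EmbeddedJdbcServer EMBEDDED_SERVER = new EmbeddedJdbcServer();

        @Test
        public void connectsOverJdbc() throws Exception {
            // connection() merges the caller's properties with the server's own
            // (e.g. "debug") and opens the connection through DriverManager.
            try (Connection es = EMBEDDED_SERVER.connection(new Properties())) {
                assertFalse(es.isClosed());
            }
        }
    }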

SqlEmbedPlugin.java (deleted)

@@ -1,101 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.qa.sql.embed;
-
-import org.elasticsearch.action.ActionRequest;
-import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.settings.ClusterSettings;
-import org.elasticsearch.common.settings.IndexScopedSettings;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsFilter;
-import org.elasticsearch.common.xcontent.NamedXContentRegistry;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.plugins.ActionPlugin;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.rest.RestController;
-import org.elasticsearch.rest.RestHandler;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.watcher.ResourceWatcherService;
-import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
-import org.elasticsearch.xpack.sql.execution.PlanExecutor;
-import org.elasticsearch.xpack.sql.plugin.RestSqlClearCursorAction;
-import org.elasticsearch.xpack.sql.plugin.RestSqlListColumnsAction;
-import org.elasticsearch.xpack.sql.plugin.RestSqlListTablesAction;
-import org.elasticsearch.xpack.sql.plugin.RestSqlQueryAction;
-import org.elasticsearch.xpack.sql.plugin.RestSqlTranslateAction;
-import org.elasticsearch.xpack.sql.plugin.SqlClearCursorAction;
-import org.elasticsearch.xpack.sql.plugin.SqlLicenseChecker;
-import org.elasticsearch.xpack.sql.plugin.SqlListColumnsAction;
-import org.elasticsearch.xpack.sql.plugin.SqlListTablesAction;
-import org.elasticsearch.xpack.sql.plugin.SqlQueryAction;
-import org.elasticsearch.xpack.sql.plugin.SqlTranslateAction;
-import org.elasticsearch.xpack.sql.plugin.TransportSqlClearCursorAction;
-import org.elasticsearch.xpack.sql.plugin.TransportSqlListColumnsAction;
-import org.elasticsearch.xpack.sql.plugin.TransportSqlListTablesAction;
-import org.elasticsearch.xpack.sql.plugin.TransportSqlQueryAction;
-import org.elasticsearch.xpack.sql.plugin.TransportSqlTranslateAction;
-import org.elasticsearch.xpack.sql.session.Cursor;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.function.Supplier;
-
-/**
- * Plugin that adds SQL functionality to the internal test cluster.
- * <p>
- * It is used in the embedded test mode by {@link EmbeddedJdbcServer}.
- */
-public class SqlEmbedPlugin extends Plugin implements ActionPlugin {
-
-    public List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return Cursor.getNamedWriteables();
-    }
-
-    private final SqlLicenseChecker sqlLicenseChecker = new SqlLicenseChecker(mode -> { });
-
-    public SqlEmbedPlugin() {
-    }
-
-    @Override
-    public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
-                                               ResourceWatcherService resourceWatcherService, ScriptService scriptService,
-                                               NamedXContentRegistry xContentRegistry, Environment environment,
-                                               NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) {
-        IndexResolver indexResolver = new IndexResolver(client);
-        return Arrays.asList(sqlLicenseChecker, indexResolver, new PlanExecutor(client, indexResolver));
-    }
-
-    @Override
-    public List<RestHandler> getRestHandlers(Settings settings, RestController restController,
-                                             ClusterSettings clusterSettings, IndexScopedSettings indexScopedSettings,
-                                             SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver,
-                                             Supplier<DiscoveryNodes> nodesInCluster) {
-        return Arrays.asList(new RestSqlQueryAction(settings, restController),
-                new RestSqlTranslateAction(settings, restController),
-                new RestSqlClearCursorAction(settings, restController),
-                new RestSqlListTablesAction(settings, restController),
-                new RestSqlListColumnsAction(settings, restController));
-    }
-
-    @Override
-    public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
-        return Arrays.asList(new ActionHandler<>(SqlQueryAction.INSTANCE, TransportSqlQueryAction.class),
-                new ActionHandler<>(SqlTranslateAction.INSTANCE, TransportSqlTranslateAction.class),
-                new ActionHandler<>(SqlClearCursorAction.INSTANCE, TransportSqlClearCursorAction.class),
-                new ActionHandler<>(SqlListTablesAction.INSTANCE, TransportSqlListTablesAction.class),
-                new ActionHandler<>(SqlListColumnsAction.INSTANCE, TransportSqlListColumnsAction.class));
-    }
-}

package-info.java (org.elasticsearch.xpack.qa.sql.embed, deleted)

@@ -1,10 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-/**
- * Support for testing in embedded mode.
- */
-package org.elasticsearch.xpack.qa.sql.embed;

CsvSpecTestCase.java

@@ -11,7 +11,6 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.io.Streams;
 import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
-import org.elasticsearch.xpack.sql.util.CollectionUtils;
 import org.relique.io.TableReader;
 import org.relique.jdbc.csv.CsvConnection;
@@ -25,6 +24,7 @@ import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 import java.util.Properties;
@@ -41,16 +41,16 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase {
     @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {
         Parser parser = specParser();
-        return CollectionUtils.combine(
-                readScriptSpec("/command.csv-spec", parser),
-                readScriptSpec("/fulltext.csv-spec", parser),
-                readScriptSpec("/agg.csv-spec", parser),
-                readScriptSpec("/columns.csv-spec", parser),
-                readScriptSpec("/datetime.csv-spec", parser),
-                readScriptSpec("/alias.csv-spec", parser),
-                readScriptSpec("/nulls.csv-spec", parser),
-                readScriptSpec("/nested.csv-spec", parser)
-                );
+        List<Object[]> tests = new ArrayList<>();
+        tests.addAll(readScriptSpec("/command.csv-spec", parser));
+        tests.addAll(readScriptSpec("/fulltext.csv-spec", parser));
+        tests.addAll(readScriptSpec("/agg.csv-spec", parser));
+        tests.addAll(readScriptSpec("/columns.csv-spec", parser));
+        tests.addAll(readScriptSpec("/datetime.csv-spec", parser));
+        tests.addAll(readScriptSpec("/alias.csv-spec", parser));
+        tests.addAll(readScriptSpec("/nulls.csv-spec", parser));
+        tests.addAll(readScriptSpec("/nested.csv-spec", parser));
+        return tests;
     }
 
     public CsvSpecTestCase(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) {
@@ -142,7 +142,7 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase {
         }
         return new Tuple<>(columns.toString(), types.toString());
     }
 
     private String resolveColumnType(String type) {
         switch (type.toLowerCase(Locale.ROOT)) {
             case "s": return "string";
@@ -196,4 +196,4 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase {
         String query;
         String expectedResults;
     }
 }

JdbcIntegrationTestCase.java

@@ -15,7 +15,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.test.rest.ESRestTestCase;
-import org.elasticsearch.xpack.qa.sql.embed.EmbeddedJdbcServer;
 import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
 import org.elasticsearch.xpack.sql.jdbc.jdbcx.JdbcDataSource;
 import org.joda.time.DateTimeZone;
@@ -39,15 +38,6 @@ import static java.util.Collections.singletonMap;
 import static org.elasticsearch.xpack.qa.sql.rest.RestSqlTestCase.assertNoSearchContexts;
 
 public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
-    /**
-     * Starts an internal cluster instead of using the external REST cluster. Useful for IDE debugging.
-     * Use: -Dtests.embed.sql=true -Dtests.security.manager=false
-     */
-    protected static final boolean EMBED_SQL = Booleans.parseBoolean(System.getProperty("tests.embed.sql", "false"));
-
-    @ClassRule
-    public static final EmbeddedJdbcServer EMBEDDED_SERVER = EMBED_SQL ? new EmbeddedJdbcServer() : null;
-
     @After
     public void checkSearchContent() throws Exception {
         // Some context might linger due to fire and forget nature of scroll cleanup
@@ -67,9 +57,6 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase {
     }
 
     public Connection esJdbc() throws SQLException {
-        if (EMBED_SQL) {
-            return EMBEDDED_SERVER.connection(connectionProperties());
-        }
         return randomBoolean() ? useDriverManager() : useDataSource();
     }
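
With the embedded path gone, esJdbc() always talks to the externally started REST cluster. A minimal sketch of what that amounts to, assuming the jdbc:es:// URL format shown in the removed EmbeddedJdbcServer above; the localhost:9200 address is only a placeholder, since the QA tests actually read the address from the tests.rest.cluster system property:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.util.Properties;

    import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;

    public class ExternalClusterJdbcExample {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            // Same connection property the spec tests set (see SqlSpecTestCase below).
            props.setProperty(JdbcConfiguration.TIME_ZONE, "UTC");
            // Placeholder address for an externally started cluster.
            try (Connection es = DriverManager.getConnection("jdbc:es://localhost:9200", props)) {
                System.out.println("connected: " + (es.isClosed() == false));
            }
        }
    }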

SpecBaseIntegrationTestCase.java

@@ -61,14 +61,12 @@ public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCase {
     @AfterClass
     public static void wipeTestData() throws IOException {
-        if (false == EMBED_SQL) {
-            try {
-                adminClient().performRequest("DELETE", "/*");
-            } catch (ResponseException e) {
-                // 404 here just means we had no indexes
-                if (e.getResponse().getStatusLine().getStatusCode() != 404) {
-                    throw e;
-                }
-            }
-        }
+        try {
+            adminClient().performRequest("DELETE", "/*");
+        } catch (ResponseException e) {
+            // 404 here just means we had no indexes
+            if (e.getResponse().getStatusLine().getStatusCode() != 404) {
+                throw e;
+            }
+        }
     }

SqlSpecTestCase.java

@@ -8,11 +8,11 @@ package org.elasticsearch.xpack.qa.sql.jdbc;
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 import org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcConfiguration;
-import org.elasticsearch.xpack.sql.util.CollectionUtils;
 import org.junit.ClassRule;
 
 import java.sql.Connection;
 import java.sql.ResultSet;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
@@ -29,14 +29,14 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
     @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {
         Parser parser = specParser();
-        return CollectionUtils.combine(
-                readScriptSpec("/select.sql-spec", parser),
-                readScriptSpec("/filter.sql-spec", parser),
-                readScriptSpec("/datetime.sql-spec", parser),
-                readScriptSpec("/math.sql-spec", parser),
-                readScriptSpec("/agg.sql-spec", parser),
-                readScriptSpec("/arithmetic.sql-spec", parser)
-                );
+        List<Object[]> tests = new ArrayList<>();
+        tests.addAll(readScriptSpec("/select.sql-spec", parser));
+        tests.addAll(readScriptSpec("/filter.sql-spec", parser));
+        tests.addAll(readScriptSpec("/datetime.sql-spec", parser));
+        tests.addAll(readScriptSpec("/math.sql-spec", parser));
+        tests.addAll(readScriptSpec("/agg.sql-spec", parser));
+        tests.addAll(readScriptSpec("/arithmetic.sql-spec", parser));
+        return tests;
     }
 
     private static class SqlSpecParser implements Parser {
@@ -57,7 +57,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
     @Override
     protected final void doTest() throws Throwable {
         try (Connection h2 = H2.get();
                 Connection es = esJdbc()) {
             ResultSet expected, elasticResults;
@@ -75,4 +75,4 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
         connectionProperties.setProperty(JdbcConfiguration.TIME_ZONE, "UTC");
         return connectionProperties;
     }
 }