SQL: [Tests] Move JDBC integration tests to new module (#56872) (#57072)

Move the JDBC functionality integration tests from `:sql:qa` to a separate
module `:sql:qa:jdbc`. This way the tests are isolated from the rest of the
integration tests and they only depend on the `:sql:jdbc` module, thus
removing the danger of accidentally pulling in some dependency that may
hide bugs.

Moreover this is a preparation for #56722, so that we can run those tests
between different JDBC and ES node versions and ensure forward
compatibility.

Move the rest of existing tests inside a new `:sql:qa:server` project, so that
the `:sql:qa` becomes the parent project for both and one can run all the integration
tests by using this parent project.

(cherry picked from commit c09f4a04484b8a43934fe58fbc41bd90b7dbcc76)
This commit is contained in:
Marios Trivyzas 2020-05-22 17:49:36 +02:00 committed by GitHub
parent d8165a3439
commit b91bae30b1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
202 changed files with 4332 additions and 2858 deletions

View File

@ -3,10 +3,11 @@
[[xpack-sql]]
= SQL access
:sql-tests: {xes-repo-dir}/../../plugin/sql/qa
:sql-specs: {sql-tests}/src/main/resources/
:jdbc-tests: {sql-tests}/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc
:security-tests: {sql-tests}/security/src/test/java/org/elasticsearch/xpack/sql/qa/security
:sql-tests: {xes-repo-dir}/../../plugin/sql/qa/
:sql-specs: {sql-tests}server/src/main/resources/
:jdbc-tests: {sql-tests}jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc
:security-tests: {sql-tests}server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security
:es-sql: Elasticsearch SQL
[partintro]
--
@ -15,7 +16,7 @@ X-Pack includes a SQL feature to execute SQL queries against {es}
indices and return results in tabular format.
The following chapters aim to cover everything from usage, to syntax and drivers.
Experienced users or those in a hurry might want to jump directly to
Experienced users or those in a hurry might want to jump directly to
the list of SQL <<sql-commands, commands>> and <<sql-functions, functions>>.
<<sql-overview, Overview>>::

View File

@ -35,6 +35,6 @@ indices:
[source, yaml]
--------------------------------------------------
include-tagged::{sql-tests}/security/roles.yml[cli_drivers]
include-tagged::{sql-tests}server/security/roles.yml[cli_drivers]
--------------------------------------------------

View File

@ -1,128 +1 @@
description = 'Integration tests for SQL'
apply plugin: 'elasticsearch.build'
archivesBaseName = 'qa-sql'
group = "org.elasticsearch.x-pack.qa.sql"
dependencies {
compile project(":test:framework")
// JDBC testing dependencies
compile project(path: xpackModule('sql:jdbc'))
compile "net.sourceforge.csvjdbc:csvjdbc:${csvjdbcVersion}"
// CLI testing dependencies
compile project(path: xpackModule('sql:sql-cli'))
// H2GIS testing dependencies
compile("org.orbisgis:h2gis:${h2gisVersion}") {
exclude group: "org.locationtech.jts"
}
// select just the parts of JLine that are needed
compile("org.jline:jline-terminal-jna:${jlineVersion}") {
exclude group: "net.java.dev.jna"
}
compile "org.jline:jline-terminal:${jlineVersion}"
compile "org.jline:jline-reader:${jlineVersion}"
compile "org.jline:jline-style:${jlineVersion}"
testRuntime "org.elasticsearch:jna:${versions.jna}"
}
/* disable unit tests because these are all integration tests used
* other qa projects. */
test.enabled = false
dependencyLicenses.enabled = false
dependenciesInfo.enabled = false
// the main files are actually test files, so use the appropriate forbidden api sigs
tasks.named('forbiddenApisMain').configure {
replaceSignatureFiles 'es-all-signatures', 'es-test-signatures'
}
// just a test fixture: we aren't using this jars in releases and H2GIS requires disabling a lot of checks
thirdPartyAudit.enabled = false
subprojects {
if (subprojects.isEmpty()) {
// leaf project
apply plugin: 'elasticsearch.standalone-rest-test'
} else {
apply plugin: 'elasticsearch.build'
}
configurations.testRuntimeClasspath {
resolutionStrategy.force "org.slf4j:slf4j-api:1.7.25"
}
configurations.testRuntime {
// This is also required to make resolveAllDependencies work
resolutionStrategy.force "org.slf4j:slf4j-api:1.7.25"
}
dependencies {
/* Since we're a standalone rest test we actually get transitive
* dependencies but we don't really want them because they cause
* all kinds of trouble with the jar hell checks. So we suppress
* them explicitly for non-es projects. */
testCompile(xpackProject('plugin:sql:qa')) {
transitive = false
}
testCompile project(":test:framework")
// JDBC testing dependencies
testRuntime "net.sourceforge.csvjdbc:csvjdbc:${csvjdbcVersion}"
testRuntime "com.h2database:h2:${h2Version}"
// H2GIS testing dependencies
testRuntime("org.orbisgis:h2gis:${h2gisVersion}") {
exclude group: "org.locationtech.jts"
exclude group: "com.fasterxml.jackson.core"
}
testRuntime project(path: xpackModule('sql:jdbc'))
testRuntime xpackProject('plugin:sql:sql-client')
// TODO check if needed
testRuntime("org.antlr:antlr4-runtime:${antlrVersion}") {
transitive = false
}
// CLI testing dependencies
testRuntime project(path: xpackModule('sql:sql-cli'))
testRuntime(xpackProject('plugin:sql:sql-action')) {
transitive = false
}
testRuntime("org.jline:jline-terminal-jna:${jlineVersion}") {
exclude group: "net.java.dev.jna"
}
testRuntime "org.jline:jline-terminal:${jlineVersion}"
testRuntime "org.jline:jline-reader:${jlineVersion}"
testRuntime "org.jline:jline-style:${jlineVersion}"
testRuntime "org.elasticsearch:jna:${versions.jna}"
// spatial dependency
testRuntime project(path: xpackModule('spatial'))
}
if (project.name != 'security') {
// The security project just configures its subprojects
apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.rest-test'
testClusters.integTest {
testDistribution = 'DEFAULT'
setting 'xpack.ml.enabled', 'false'
setting 'xpack.watcher.enabled', 'false'
}
task runqa {
doFirst {
println "Run with `-Dtestclusters.inspect.failure=true integTest` to leave the cluster running after failure"
}
}
}
}

View File

@ -0,0 +1,61 @@
description = 'Integration tests for SQL JDBC driver'
apply plugin: 'elasticsearch.build'
// Avoid circular dependency
group = "org.elasticsearch.x-pack.qa.sql.jdbc"
dependencies {
compile project(":test:framework")
// JDBC testing dependencies
compile project(path: xpackModule('sql:jdbc'))
}
/* disable unit tests because these are all integration tests used
* other qa projects. */
test.enabled = false
dependencyLicenses.enabled = false
dependenciesInfo.enabled = false
// the main files are actually test files, so use the appropriate forbidden api sigs
tasks.named('forbiddenApisMain').configure {
replaceSignatureFiles 'es-all-signatures', 'es-test-signatures'
}
// just a test fixture: we aren't using this jars in releases and H2GIS requires disabling a lot of checks
thirdPartyAudit.enabled = false
subprojects {
if (subprojects.isEmpty()) {
// leaf project
apply plugin: 'elasticsearch.standalone-rest-test'
} else {
apply plugin: 'elasticsearch.build'
}
dependencies {
/* Since we're a standalone rest test we actually get transitive
* dependencies but we don't really want them because they cause
* all kinds of trouble with the jar hell checks. So we suppress
* them explicitly for non-es projects. */
testCompile(xpackProject('plugin:sql:qa:jdbc')) {
transitive = false
}
testCompile project(":test:framework")
testRuntime project(path: xpackModule('sql:jdbc'))
}
if (project.name != 'security') {
// The security project just configures its subprojects
apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.rest-test'
testClusters.integTest {
testDistribution = 'DEFAULT'
setting 'xpack.ml.enabled', 'false'
setting 'xpack.watcher.enabled', 'false'
}
}
}

View File

@ -0,0 +1,7 @@
description = 'Run SQL JDBC tests against multiple nodes'
testClusters.integTest {
numberOfNodes = 2
setting 'xpack.security.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
}

View File

@ -3,9 +3,8 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.single_node;
package org.elasticsearch.xpack.sql.qa.jdbc.multi_node;
import org.elasticsearch.xpack.sql.qa.jdbc.ConnectionTestCase;
public class JdbcConnectionIT extends ConnectionTestCase {
}
public class JdbcConnectionIT extends ConnectionTestCase {}

View File

@ -3,9 +3,8 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.single_node;
package org.elasticsearch.xpack.sql.qa.jdbc.multi_node;
import org.elasticsearch.xpack.sql.qa.jdbc.FetchSizeTestCase;
public class JdbcFetchSizeIT extends FetchSizeTestCase {
}
public class JdbcFetchSizeIT extends FetchSizeTestCase {}

View File

@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.jdbc.multi_node;
import org.elasticsearch.xpack.sql.qa.jdbc.JdbcErrorsTestCase;
public class JdbcJdbcErrorsIT extends JdbcErrorsTestCase {}

View File

@ -3,9 +3,8 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.multi_node;
package org.elasticsearch.xpack.sql.qa.jdbc.multi_node;
import org.elasticsearch.xpack.sql.qa.jdbc.PreparedStatementTestCase;
public class JdbcPreparedStatementIT extends PreparedStatementTestCase {
}
public class JdbcPreparedStatementIT extends PreparedStatementTestCase {}

View File

@ -0,0 +1,4 @@
testClusters.integTest {
setting 'xpack.security.enabled', 'false'
setting 'xpack.license.self_generated.type', 'trial'
}

View File

@ -4,10 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.no_sql;
package org.elasticsearch.xpack.sql.qa.jdbc.no_sql;
import org.elasticsearch.xpack.sql.qa.jdbc.JdbcNoSqlTestCase;
public class JdbcNoSqlIT extends JdbcNoSqlTestCase {
}
public class JdbcNoSqlIT extends JdbcNoSqlTestCase {}

View File

@ -0,0 +1,61 @@
dependencies {
testCompile project(':x-pack:plugin:core')
}
Project mainProject = project
configurations.create('testArtifacts')
TaskProvider testJar = tasks.register("testJar", Jar) {
appendix 'test'
from sourceSets.test.output
}
artifacts {
testArtifacts testJar
}
// Tests are pushed down to subprojects and will be checked there.
testingConventions.enabled = false
subprojects {
// Use tests from the root security qa project in subprojects
configurations.create('testArtifacts')
dependencies {
testCompile project(":x-pack:plugin:core")
testArtifacts project(path: mainProject.path, configuration: 'testArtifacts')
}
testClusters.integTest {
testDistribution = 'DEFAULT'
// Setup auditing so we can use it in some tests
setting 'xpack.security.audit.enabled', 'true'
setting 'xpack.security.enabled', 'true'
setting 'xpack.license.self_generated.type', 'trial'
// Setup roles used by tests
extraConfigFile 'roles.yml', mainProject.file('roles.yml')
/* Setup the one admin user that we run the tests as.
* Tests use "run as" to get different users. */
user username: "test_admin", password: "x-pack-test-password"
}
File testArtifactsDir = project.file("$buildDir/testArtifacts")
TaskProvider copyTestClasses = tasks.register("copyTestClasses", Copy) {
dependsOn configurations.testArtifacts
from { zipTree(configurations.testArtifacts.singleFile) }
into testArtifactsDir
}
integTest.runner {
dependsOn copyTestClasses
testClassesDirs += project.files(testArtifactsDir)
classpath += configurations.testArtifacts
nonInputProperties.systemProperty 'tests.audit.logfile',
"${-> testClusters.integTest.singleNode().getAuditLog()}"
nonInputProperties.systemProperty 'tests.audit.yesterday.logfile',
"${-> testClusters.integTest.singleNode().getAuditLog().getParentFile()}/integTest_audit-${new Date().format('yyyy-MM-dd')}.json"
}
testingConventions.enabled = false
}

View File

@ -0,0 +1,61 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.qa.jdbc.security;

import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.sql.qa.jdbc.ConnectionTestCase;

import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Properties;

import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;

/**
 * Runs the shared {@link ConnectionTestCase} tests against the security-enabled qa
 * cluster. Also hosts the REST-client security settings that the sibling IT classes
 * in this package reuse.
 */
public class JdbcConnectionIT extends ConnectionTestCase {

    // Set by the build via -Dtests.ssl.enabled; defaults to plain http.
    static final boolean SSL_ENABLED = Booleans.parseBoolean(System.getProperty("tests.ssl.enabled"), false);

    /** REST client settings with admin credentials, plus the truststore when SSL is on. */
    static Settings securitySettings() {
        String authHeader = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray()));
        Settings.Builder settings = Settings.builder().put(ThreadContext.PREFIX + ".Authorization", authHeader);
        if (SSL_ENABLED == false) {
            return settings.build();
        }
        Path trustStorePath;
        try {
            trustStorePath = PathUtils.get(getTestClass().getResource("/test-node.jks").toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException("exception while reading the store", e);
        }
        if (Files.exists(trustStorePath) == false) {
            throw new IllegalStateException("Keystore file [" + trustStorePath + "] does not exist.");
        }
        return settings.put(ESRestTestCase.TRUSTSTORE_PATH, trustStorePath)
            .put(ESRestTestCase.TRUSTSTORE_PASSWORD, "keypass")
            .build();
    }

    @Override
    protected Settings restClientSettings() {
        return securitySettings();
    }

    @Override
    protected String getProtocol() {
        return SSL_ENABLED ? "https" : "http";
    }

    @Override
    protected Properties connectionProperties() {
        Properties props = super.connectionProperties();
        props.putAll(JdbcSecurityUtils.adminProperties());
        return props;
    }
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.security;
package org.elasticsearch.xpack.sql.qa.jdbc.security;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.sql.qa.jdbc.FetchSizeTestCase;
@ -11,20 +11,21 @@ import org.elasticsearch.xpack.sql.qa.jdbc.FetchSizeTestCase;
import java.util.Properties;
public class JdbcFetchSizeIT extends FetchSizeTestCase {
@Override
protected Settings restClientSettings() {
return RestSqlIT.securitySettings();
return JdbcConnectionIT.securitySettings();
}
@Override
protected String getProtocol() {
return RestSqlIT.SSL_ENABLED ? "https" : "http";
return JdbcConnectionIT.SSL_ENABLED ? "https" : "http";
}
@Override
protected Properties connectionProperties() {
Properties properties = super.connectionProperties();
properties.putAll(JdbcSecurityIT.adminProperties());
properties.putAll(JdbcSecurityUtils.adminProperties());
return properties;
}
}

View File

@ -3,28 +3,29 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.security;
package org.elasticsearch.xpack.sql.qa.jdbc.security;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.sql.qa.jdbc.ConnectionTestCase;
import org.elasticsearch.xpack.sql.qa.jdbc.JdbcErrorsTestCase;
import java.util.Properties;
public class JdbcConnectionIT extends ConnectionTestCase {
public class JdbcJdbcErrorsIT extends JdbcErrorsTestCase {
@Override
protected Settings restClientSettings() {
return RestSqlIT.securitySettings();
return JdbcConnectionIT.securitySettings();
}
@Override
protected String getProtocol() {
return RestSqlIT.SSL_ENABLED ? "https" : "http";
return JdbcConnectionIT.SSL_ENABLED ? "https" : "http";
}
@Override
protected Properties connectionProperties() {
Properties properties = super.connectionProperties();
properties.putAll(JdbcSecurityIT.adminProperties());
properties.putAll(JdbcSecurityUtils.adminProperties());
return properties;
}
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.security;
package org.elasticsearch.xpack.sql.qa.jdbc.security;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.sql.qa.jdbc.PreparedStatementTestCase;
@ -11,20 +11,21 @@ import org.elasticsearch.xpack.sql.qa.jdbc.PreparedStatementTestCase;
import java.util.Properties;
public class JdbcPreparedStatementIT extends PreparedStatementTestCase {
@Override
protected Settings restClientSettings() {
return RestSqlIT.securitySettings();
return JdbcConnectionIT.securitySettings();
}
@Override
protected String getProtocol() {
return RestSqlIT.SSL_ENABLED ? "https" : "http";
return JdbcConnectionIT.SSL_ENABLED ? "https" : "http";
}
@Override
protected Properties connectionProperties() {
Properties sp = super.connectionProperties();
sp.putAll(JdbcSecurityIT.adminProperties());
sp.putAll(JdbcSecurityUtils.adminProperties());
return sp;
}
}

View File

@ -0,0 +1,52 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.qa.jdbc.security;

import org.elasticsearch.common.io.PathUtils;

import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Properties;

import static org.apache.lucene.util.LuceneTestCase.getTestClass;

/**
 * Utilities shared by the security JDBC ITs: builds the JDBC connection
 * {@link Properties} for the admin test user, adding SSL keystore/truststore
 * settings when the build enables SSL.
 */
final class JdbcSecurityUtils {

    private JdbcSecurityUtils() {}

    /**
     * Connection properties authenticating as the "test_admin" user the test
     * cluster is started with, plus SSL properties when SSL is enabled.
     * The tag comments mark a snippet included in the documentation — keep them.
     */
    static Properties adminProperties() {
        // tag::admin_properties
        Properties properties = new Properties();
        properties.put("user", "test_admin");
        properties.put("password", "x-pack-test-password");
        // end::admin_properties
        addSslPropertiesIfNeeded(properties);
        return properties;
    }

    // Mirrors JdbcConnectionIT.securitySettings(): points the driver at the
    // generated test-node.jks on the test classpath.
    private static void addSslPropertiesIfNeeded(Properties properties) {
        if (false == JdbcConnectionIT.SSL_ENABLED) {
            return;
        }
        Path keyStore;
        try {
            keyStore = PathUtils.get(getTestClass().getResource("/test-node.jks").toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException("exception while reading the store", e);
        }
        // Use the `== false` form for consistency with the SSL_ENABLED check above.
        if (Files.exists(keyStore) == false) {
            throw new IllegalStateException("Keystore file [" + keyStore + "] does not exist.");
        }
        String keyStoreStr = keyStore.toAbsolutePath().toString();
        properties.put("ssl", "true");
        properties.put("ssl.keystore.location", keyStoreStr);
        properties.put("ssl.keystore.pass", "keypass");
        properties.put("ssl.truststore.location", keyStoreStr);
        properties.put("ssl.truststore.pass", "keypass");
    }
}

View File

@ -3,7 +3,7 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.security;
package org.elasticsearch.xpack.sql.qa.jdbc.security;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.sql.qa.jdbc.SimpleExampleTestCase;
@ -11,20 +11,21 @@ import org.elasticsearch.xpack.sql.qa.jdbc.SimpleExampleTestCase;
import java.util.Properties;
public class JdbcSimpleExampleIT extends SimpleExampleTestCase {
@Override
protected Settings restClientSettings() {
return RestSqlIT.securitySettings();
return JdbcConnectionIT.securitySettings();
}
@Override
protected String getProtocol() {
return RestSqlIT.SSL_ENABLED ? "https" : "http";
return JdbcConnectionIT.SSL_ENABLED ? "https" : "http";
}
@Override
protected Properties connectionProperties() {
Properties properties = super.connectionProperties();
properties.putAll(JdbcSecurityIT.adminProperties());
properties.putAll(JdbcSecurityUtils.adminProperties());
return properties;
}
}

View File

@ -0,0 +1,340 @@
import org.elasticsearch.gradle.LoggedExec
import org.elasticsearch.gradle.info.BuildParams

// Tell the tests we're running with ssl enabled
integTest.runner {
    systemProperty 'tests.ssl.enabled', 'true'
}

// needed to be consistent with ssl host checking
Object san = new SanEvaluator()
// needed to be consistent with ssl host checking
String host = InetAddress.getLoopbackAddress().getHostAddress();

// location of generated keystores and certificates
File keystoreDir = new File(project.buildDir, 'keystore')

// Generate the node's keystore
File nodeKeystore = file("$keystoreDir/test-node.jks")
task createNodeKeyStore(type: LoggedExec) {
    doFirst {
        if (nodeKeystore.parentFile.exists() == false) {
            nodeKeystore.parentFile.mkdirs()
        }
        if (nodeKeystore.exists()) {
            delete nodeKeystore
        }
    }
    // Use the runtime Java home for keytool, consistent with the other keytool tasks below
    // (previously this one task used BuildParams.compilerJavaHome).
    executable = "${BuildParams.runtimeJavaHome}/bin/keytool"
    standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
    args '-genkey',
        '-alias', 'test-node',
        '-keystore', nodeKeystore,
        '-keyalg', 'RSA',
        '-keysize', '2048',
        '-validity', '712',
        '-dname', 'CN=' + host,
        '-keypass', 'keypass',
        '-storepass', 'keypass',
        '-ext', san
}

// Generate the client's keystore
File clientKeyStore = file("$keystoreDir/test-client.jks")
task createClientKeyStore(type: LoggedExec) {
    doFirst {
        if (clientKeyStore.parentFile.exists() == false) {
            clientKeyStore.parentFile.mkdirs()
        }
        if (clientKeyStore.exists()) {
            delete clientKeyStore
        }
    }
    executable = "${BuildParams.runtimeJavaHome}/bin/keytool"
    standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8'))
    args '-genkey',
        '-alias', 'test-client',
        '-keystore', clientKeyStore,
        '-keyalg', 'RSA',
        '-keysize', '2048',
        '-validity', '712',
        '-dname', 'CN=' + host,
        '-keypass', 'keypass',
        '-storepass', 'keypass',
        '-ext', san
}

// Export the node's certificate
File nodeCertificate = file("$keystoreDir/test-node.cert")
task exportNodeCertificate(type: LoggedExec) {
    dependsOn createNodeKeyStore
    doFirst {
        if (nodeCertificate.parentFile.exists() == false) {
            nodeCertificate.parentFile.mkdirs()
        }
        if (nodeCertificate.exists()) {
            delete nodeCertificate
        }
    }
    executable = "${BuildParams.runtimeJavaHome}/bin/keytool"
    args '-export',
        '-alias', 'test-node',
        '-keystore', nodeKeystore,
        '-storepass', 'keypass',
        '-file', nodeCertificate
}

// Import the node certificate in the client's keystore
task importNodeCertificateInClientKeyStore(type: LoggedExec) {
    dependsOn createClientKeyStore, exportNodeCertificate
    executable = "${BuildParams.runtimeJavaHome}/bin/keytool"
    args '-import',
        '-alias', 'test-node',
        '-keystore', clientKeyStore,
        '-storepass', 'keypass',
        '-file', nodeCertificate,
        '-noprompt'
}

// Export the client's certificate
File clientCertificate = file("$keystoreDir/test-client.cert")
task exportClientCertificate(type: LoggedExec) {
    dependsOn createClientKeyStore
    doFirst {
        if (clientCertificate.parentFile.exists() == false) {
            clientCertificate.parentFile.mkdirs()
        }
        if (clientCertificate.exists()) {
            delete clientCertificate
        }
    }
    executable = "${BuildParams.runtimeJavaHome}/bin/keytool"
    args '-export',
        '-alias', 'test-client',
        '-keystore', clientKeyStore,
        '-storepass', 'keypass',
        '-file', clientCertificate
}

// Import the client certificate in the node's keystore
task importClientCertificateInNodeKeyStore(type: LoggedExec) {
    dependsOn createNodeKeyStore, exportClientCertificate
    executable = "${BuildParams.runtimeJavaHome}/bin/keytool"
    args '-import',
        '-alias', 'test-client',
        '-keystore', nodeKeystore,
        '-storepass', 'keypass',
        '-file', clientCertificate,
        '-noprompt'
}

forbiddenPatterns {
    exclude '**/*.cert'
}

// Add keystores to test classpath: it expects it there
sourceSets.test.resources.srcDir(keystoreDir)
processTestResources.dependsOn(importNodeCertificateInClientKeyStore, importClientCertificateInNodeKeyStore)

integTest.runner {
    dependsOn(importClientCertificateInNodeKeyStore)
    onlyIf {
        // Do not attempt to form a cluster in a FIPS JVM, as doing so with a JKS keystore will fail.
        // TODO Revisit this when SQL CLI client can handle key/certificate instead of only Keystores.
        // https://github.com/elastic/elasticsearch/issues/32306
        BuildParams.inFipsJvm == false
    }
}

testClusters.integTest {
    // The setup that we actually want
    setting 'xpack.license.self_generated.type', 'trial'
    setting 'xpack.security.http.ssl.enabled', 'true'
    setting 'xpack.security.transport.ssl.enabled', 'true'
    // ceremony to set up ssl
    setting 'xpack.security.transport.ssl.keystore.path', 'test-node.jks'
    setting 'xpack.security.http.ssl.keystore.path', 'test-node.jks'
    keystore 'xpack.security.transport.ssl.keystore.secure_password', 'keypass'
    keystore 'xpack.security.http.ssl.keystore.secure_password', 'keypass'
    // copy keystores into config/
    extraConfigFile nodeKeystore.name, nodeKeystore
    extraConfigFile clientKeyStore.name, clientKeyStore
}
/** A lazy evaluator to find the san to use for certificate generation. */
class SanEvaluator {

    // Cached result; computed once under the class lock on first toString() call.
    private static String san = null

    String toString() {
        synchronized (SanEvaluator.class) {
            if (san == null) {
                san = getSubjectAlternativeNameString()
            }
        }
        return san
    }

    // Code stolen from NetworkUtils/InetAddresses/NetworkAddress to support SAN

    /** Return all interfaces (and subinterfaces) on the system */
    private static List<NetworkInterface> getInterfaces() throws SocketException {
        List<NetworkInterface> all = new ArrayList<>();
        addAllInterfaces(all, Collections.list(NetworkInterface.getNetworkInterfaces()));
        // Sort by interface index for a deterministic, OS-independent order.
        Collections.sort(all, new Comparator<NetworkInterface>() {
            @Override
            public int compare(NetworkInterface left, NetworkInterface right) {
                return Integer.compare(left.getIndex(), right.getIndex());
            }
        });
        return all;
    }

    /** Helper for getInterfaces, recursively adds subinterfaces to {@code target} */
    private static void addAllInterfaces(List<NetworkInterface> target, List<NetworkInterface> level) {
        if (!level.isEmpty()) {
            target.addAll(level);
            for (NetworkInterface intf : level) {
                addAllInterfaces(target, Collections.list(intf.getSubInterfaces()));
            }
        }
    }

    /**
     * Build the keytool "san=..." extension value: every up-and-running loopback
     * address as an {@code ip:} entry, plus a {@code dns:} entry when the address
     * resolves to a distinct hostname.
     */
    private static String getSubjectAlternativeNameString() {
        List<InetAddress> list = new ArrayList<>();
        for (NetworkInterface intf : getInterfaces()) {
            for (final InetAddress address : Collections.list(intf.getInetAddresses())) {
                /*
                 * Some OS (e.g., BSD) assign a link-local address to the loopback interface. While technically not a loopback interface, some of
                 * these OS treat them as one (e.g., localhost on macOS), so we must too. Otherwise, things just won't work out of the box. So we
                 * include all addresses from loopback interfaces.
                 *
                 * By checking if the interface is a loopback interface or the address is a loopback address first, we avoid having to check if the
                 * interface is up unless necessary. This means we can avoid checking if the interface is up for virtual ethernet devices which have
                 * a tendency to disappear outside of our control (e.g., due to Docker).
                 */
                if ((intf.isLoopback() || address.isLoopbackAddress()) && isUp(intf, address)) {
                    list.add(address)
                }
            }
        }
        if (list.isEmpty()) {
            throw new IllegalArgumentException("no up-and-running loopback addresses found, got " + getInterfaces());
        }
        StringBuilder builder = new StringBuilder("san=");
        for (int i = 0; i < list.size(); i++) {
            InetAddress address = list.get(i);
            String hostAddress;
            if (address instanceof Inet6Address) {
                hostAddress = compressedIPV6Address((Inet6Address) address);
            } else {
                hostAddress = address.getHostAddress();
            }
            builder.append("ip:").append(hostAddress);
            String hostname = address.getHostName();
            if (hostname.equals(address.getHostAddress()) == false) {
                builder.append(",dns:").append(hostname);
            }
            if (i != (list.size() - 1)) {
                builder.append(",");
            }
        }
        return builder.toString();
    }

    // Wraps NetworkInterface.isUp() so a SocketException surfaces as IOException with context.
    private static boolean isUp(final NetworkInterface intf, final InetAddress address) throws IOException {
        try {
            return intf.isUp();
        } catch (final SocketException e) {
            /*
             * In Elasticsearch production code (NetworkUtils) we suppress this if the device is a virtual ethernet device. That should not happen
             * here since the interface must be a loopback device or the address a loopback address to get here to begin with.
             */
            assert intf.isLoopback() || address.isLoopbackAddress()
            throw new IOException("failed to check if interface [" + intf.getName() + "] is up", e)
        }
    }

    // Render an IPv6 address in its RFC 5952-style compressed "::" form.
    private static String compressedIPV6Address(Inet6Address inet6Address) {
        byte[] bytes = inet6Address.getAddress();
        int[] hextets = new int[8];
        for (int i = 0; i < hextets.length; i++) {
            hextets[i] = (bytes[2 * i] & 255) << 8 | bytes[2 * i + 1] & 255;
        }
        compressLongestRunOfZeroes(hextets);
        return hextetsToIPv6String(hextets);
    }

    /**
     * Identify and mark the longest run of zeroes in an IPv6 address.
     *
     * <p>Only runs of two or more hextets are considered. In case of a tie, the
     * leftmost run wins. If a qualifying run is found, its hextets are replaced
     * by the sentinel value -1.
     *
     * @param hextets {@code int[]} mutable array of eight 16-bit hextets
     */
    private static void compressLongestRunOfZeroes(int[] hextets) {
        int bestRunStart = -1;
        int bestRunLength = -1;
        int runStart = -1;
        // Iterate one past the end so a trailing run of zeroes is also closed out.
        for (int i = 0; i < hextets.length + 1; i++) {
            if (i < hextets.length && hextets[i] == 0) {
                if (runStart < 0) {
                    runStart = i;
                }
            } else if (runStart >= 0) {
                int runLength = i - runStart;
                if (runLength > bestRunLength) {
                    bestRunStart = runStart;
                    bestRunLength = runLength;
                }
                runStart = -1;
            }
        }
        if (bestRunLength >= 2) {
            Arrays.fill(hextets, bestRunStart, bestRunStart + bestRunLength, -1);
        }
    }

    /**
     * Convert a list of hextets into a human-readable IPv6 address.
     *
     * <p>In order for "::" compression to work, the input should contain negative
     * sentinel values in place of the elided zeroes.
     *
     * @param hextets {@code int[]} array of eight 16-bit hextets, or -1s
     */
    private static String hextetsToIPv6String(int[] hextets) {
        /*
         * While scanning the array, handle these state transitions:
         * start->num => "num" start->gap => "::"
         * num->num => ":num" num->gap => "::"
         * gap->num => "num" gap->gap => ""
         */
        StringBuilder buf = new StringBuilder(39);
        boolean lastWasNumber = false;
        for (int i = 0; i < hextets.length; i++) {
            boolean thisIsNumber = hextets[i] >= 0;
            if (thisIsNumber) {
                if (lastWasNumber) {
                    buf.append(':');
                }
                buf.append(Integer.toHexString(hextets[i]));
            } else {
                if (i == 0 || lastWasNumber) {
                    buf.append("::");
                }
            }
            lastWasNumber = thisIsNumber;
        }
        return buf.toString();
    }
}

View File

@ -0,0 +1,7 @@
// Security is enabled for this project but SSL is not; tell the tests so.
integTest.runner {
    systemProperty 'tests.ssl.enabled', 'false'
}
testClusters.integTest {
    setting 'xpack.license.self_generated.type', 'trial'
}

View File

@ -0,0 +1,4 @@
// Run the test cluster without security and with a self-generated trial license.
testClusters.integTest {
    setting 'xpack.security.enabled', 'false'
    setting 'xpack.license.self_generated.type', 'trial'
}

View File

@ -0,0 +1,10 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.qa.jdbc.single_node;

import org.elasticsearch.xpack.sql.qa.jdbc.ConnectionTestCase;

/**
 * Runs the shared {@link ConnectionTestCase} tests against the single_node qa cluster.
 */
public class JdbcConnectionIT extends ConnectionTestCase {}

View File

@ -3,9 +3,8 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.multi_node;
package org.elasticsearch.xpack.sql.qa.jdbc.single_node;
import org.elasticsearch.xpack.sql.qa.jdbc.ErrorsTestCase;
import org.elasticsearch.xpack.sql.qa.jdbc.FetchSizeTestCase;
public class JdbcErrorsIT extends ErrorsTestCase {
}
/** Runs the shared {@link FetchSizeTestCase} tests against a single-node test cluster. */
public class JdbcFetchSizeIT extends FetchSizeTestCase {}

View File

@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.jdbc.single_node;
import org.elasticsearch.xpack.sql.qa.jdbc.JdbcErrorsTestCase;
/** Runs the shared {@link JdbcErrorsTestCase} tests against a single-node test cluster. */
public class JdbcJdbcErrorsIT extends JdbcErrorsTestCase {}

View File

@ -3,9 +3,8 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.single_node;
package org.elasticsearch.xpack.sql.qa.jdbc.single_node;
import org.elasticsearch.xpack.sql.qa.jdbc.PreparedStatementTestCase;
public class JdbcPreparedStatementIT extends PreparedStatementTestCase {
}
/** Runs the shared {@link PreparedStatementTestCase} tests against a single-node test cluster. */
public class JdbcPreparedStatementIT extends PreparedStatementTestCase {}

View File

@ -0,0 +1,11 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.jdbc.single_node;
import org.elasticsearch.xpack.sql.qa.jdbc.ResultSetTestCase;
/** Runs the shared {@link ResultSetTestCase} tests against a single-node test cluster. */
public class JdbcResultSetIT extends ResultSetTestCase {}

View File

@ -4,10 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.single_node;
package org.elasticsearch.xpack.sql.qa.jdbc.single_node;
import org.elasticsearch.xpack.sql.qa.jdbc.ResultSetMetaDataTestCase;
public class JdbcResultSetMetaDataIT extends ResultSetMetaDataTestCase {
}
/** Runs the shared {@link ResultSetMetaDataTestCase} tests against a single-node test cluster. */
public class JdbcResultSetMetaDataIT extends ResultSetMetaDataTestCase {}

View File

@ -0,0 +1,52 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.jdbc.single_node;
import org.elasticsearch.client.Request;
import org.elasticsearch.xpack.sql.qa.jdbc.JdbcIntegrationTestCase;
import org.junit.Before;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import static org.hamcrest.Matchers.containsString;
/**
 * Checks that a JDBC query hitting an alias with unassigned shards surfaces a
 * meaningful {@link SQLException} instead of returning a silently partial result.
 */
public class JdbcShardFailureIT extends JdbcIntegrationTestCase {

    /**
     * Sets up two indices behind the {@code test} alias: {@code test1} holds 20 documents,
     * while {@code test2} has its allocation filtered to a nonexistent node ("nowhere"), so
     * its shards stay unassigned and any search across the alias hits missing shards.
     */
    @Before
    public void createTestIndex() throws IOException {
        Request createTest1 = new Request("PUT", "/test1");
        String body1 = "{\"aliases\":{\"test\":{}}, \"mappings\": {\"properties\": {\"test_field\":{\"type\":\"integer\"}}}}";
        createTest1.setJsonEntity(body1);
        client().performRequest(createTest1);
        Request createTest2 = new Request("PUT", "/test2");
        String body2 = "{\"aliases\":{\"test\":{}}, \"mappings\": {\"properties\": {\"test_field\":{\"type\":\"integer\"}}},"
            + "\"settings\": {\"index.routing.allocation.include.node\": \"nowhere\"}}";
        createTest2.setJsonEntity(body2);
        // Short timeout: we don't wait for allocation that can never succeed.
        createTest2.addParameter("timeout", "100ms");
        client().performRequest(createTest2);
        Request request = new Request("PUT", "/test1/_bulk");
        request.addParameter("refresh", "true");
        StringBuilder bulk = new StringBuilder();
        for (int i = 0; i < 20; i++) {
            bulk.append("{\"index\":{}}\n");
            bulk.append("{\"test_field\":").append(i).append("}\n");
        }
        request.setJsonEntity(bulk.toString());
        client().performRequest(request);
    }

    /** Querying through the alias must fail fast with a "missing shards" message. */
    public void testPartialResponseHandling() throws SQLException {
        try (Connection c = esJdbc(); Statement s = c.createStatement()) {
            SQLException exception = expectThrows(SQLException.class, () -> s.executeQuery("SELECT * FROM test ORDER BY test_field ASC"));
            assertThat(exception.getMessage(), containsString("Search rejected due to missing shards"));
        }
    }
}

View File

@ -3,9 +3,8 @@
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.single_node;
package org.elasticsearch.xpack.sql.qa.jdbc.single_node;
import org.elasticsearch.xpack.sql.qa.jdbc.SimpleExampleTestCase;
public class JdbcSimpleExampleIT extends SimpleExampleTestCase {
}
/** Runs the shared {@link SimpleExampleTestCase} tests against a single-node test cluster. */
public class JdbcSimpleExampleIT extends SimpleExampleTestCase {}

View File

@ -15,6 +15,7 @@ import java.sql.SQLException;
* Test the jdbc {@link Connection} implementation.
*/
public abstract class ConnectionTestCase extends JdbcIntegrationTestCase {
public void testConnectionProperties() throws SQLException {
try (Connection c = esJdbc()) {
assertFalse(c.isClosed());
@ -34,7 +35,7 @@ public abstract class ConnectionTestCase extends JdbcIntegrationTestCase {
/**
 * Tests that we report no transaction isolation and throw sensible errors if you ask for any.
*/
public void testTransactionIsolation() throws Exception {
public void testTransactionIsolation() throws SQLException {
try (Connection c = esJdbc()) {
assertEquals(Connection.TRANSACTION_NONE, c.getTransactionIsolation());
SQLException e = expectThrows(SQLException.class, () -> c.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE));
@ -42,4 +43,4 @@ public abstract class ConnectionTestCase extends JdbcIntegrationTestCase {
assertEquals(Connection.TRANSACTION_NONE, c.getTransactionIsolation());
}
}
}
}

View File

@ -21,14 +21,12 @@ import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.Properties;
import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.JDBC_TIMEZONE;
import static org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase.assertNoSearchContexts;
/**
* Tests for setting {@link Statement#setFetchSize(int)} and
* {@link ResultSet#getFetchSize()}.
*/
public class FetchSizeTestCase extends JdbcIntegrationTestCase {
public abstract class FetchSizeTestCase extends JdbcIntegrationTestCase {
@Before
public void createTestIndex() throws IOException {
Request request = new Request("PUT", "/test");
@ -48,7 +46,7 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
createIndex.endObject().endObject();
request.setJsonEntity(Strings.toString(createIndex));
client().performRequest(request);
request = new Request("PUT", "/test/_bulk");
request.addParameter("refresh", "true");
StringBuilder bulk = new StringBuilder();
@ -59,7 +57,7 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
bulkLine.append(", \"nested\":[");
// each document will have a nested field with 1 - 5 values
for (int j = 0; j <= i % 5; j++) {
bulkLine.append("{\"inner_field\":" + j + "}" + ((j == i % 5) ? "" : ","));
bulkLine.append("{\"inner_field\":").append(j).append("}").append((j == i % 5) ? "" : ",");
}
bulkLine.append("]");
bulk.append(bulkLine).append("}\n");
@ -73,8 +71,7 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
* In this case the fetch size should be entirely respected.
*/
public void testScroll() throws SQLException {
try (Connection c = esJdbc();
Statement s = c.createStatement()) {
try (Connection c = esJdbc(); Statement s = c.createStatement()) {
s.setFetchSize(4);
try (ResultSet rs = s.executeQuery("SELECT * FROM test ORDER BY test_field ASC")) {
for (int i = 0; i < 20; i++) {
@ -91,9 +88,8 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
* Test for {@code SELECT} that is implemented as a scroll query.
* In this test we don't retrieve all records and rely on close() to clean the cursor
*/
public void testIncompleteScroll() throws Exception {
try (Connection c = esJdbc();
Statement s = c.createStatement()) {
public void testIncompleteScroll() throws SQLException {
try (Connection c = esJdbc(); Statement s = c.createStatement()) {
s.setFetchSize(4);
try (ResultSet rs = s.executeQuery("SELECT * FROM test ORDER BY test_field ASC")) {
for (int i = 0; i < 10; i++) {
@ -104,7 +100,6 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
assertTrue(rs.next());
}
}
assertNoSearchContexts();
}
public void testScrollWithDatetimeAndTimezoneParam() throws IOException, SQLException {
@ -136,17 +131,17 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
ZoneId zoneId = randomZone();
Properties connectionProperties = connectionProperties();
connectionProperties.put(JDBC_TIMEZONE, zoneId.toString());
try (Connection c = esJdbc(connectionProperties);
Statement s = c.createStatement()) {
connectionProperties.put(JdbcTestUtils.JDBC_TIMEZONE, zoneId.toString());
try (Connection c = esJdbc(connectionProperties); Statement s = c.createStatement()) {
s.setFetchSize(2);
try (ResultSet rs =
s.executeQuery("SELECT DATE_PART('TZOFFSET', date) FROM test_date_timezone ORDER BY date")) {
try (ResultSet rs = s.executeQuery("SELECT DATE_PART('TZOFFSET', date) FROM test_date_timezone ORDER BY date")) {
for (int i = 0; i < datetimes.length; i++) {
assertEquals(2, rs.getFetchSize());
assertTrue("No more entries left at " + i, rs.next());
assertEquals(ZonedDateTime.ofInstant(Instant.ofEpochMilli(datetimes[i]), zoneId).getOffset()
.getTotalSeconds()/ 60, rs.getInt(1));
assertEquals(
ZonedDateTime.ofInstant(Instant.ofEpochMilli(datetimes[i]), zoneId).getOffset().getTotalSeconds() / 60,
rs.getInt(1)
);
}
assertFalse(rs.next());
}
@ -157,8 +152,7 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
* Test for {@code SELECT} that is implemented as an aggregation.
*/
public void testAggregation() throws SQLException {
try (Connection c = esJdbc();
Statement s = c.createStatement()) {
try (Connection c = esJdbc(); Statement s = c.createStatement()) {
s.setFetchSize(4);
try (ResultSet rs = s.executeQuery("SELECT test_field, COUNT(*) FROM test GROUP BY test_field")) {
for (int i = 0; i < 20; i++) {
@ -171,13 +165,12 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
}
}
}
/**
* Test for nested documents.
*/
public void testNestedDocuments() throws Exception {
try (Connection c = esJdbc();
Statement s = c.createStatement()) {
public void testNestedDocuments() throws SQLException {
try (Connection c = esJdbc(); Statement s = c.createStatement()) {
s.setFetchSize(5);
try (ResultSet rs = s.executeQuery("SELECT test_field, nested.* FROM test ORDER BY test_field ASC")) {
assertTrue("Empty result set!", rs.next());
@ -188,7 +181,6 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
assertFalse(rs.next());
}
}
assertNoSearchContexts();
}
private void assertNestedDocuments(ResultSet rs, int i) throws SQLException {
@ -196,24 +188,23 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
assertEquals(i, rs.getInt(1));
assertEquals(j, rs.getInt(2));
// don't check the very last row in the result set
assertTrue("No more entries left after row " + rs.getRow(), (i+j == 23 || rs.next()));
assertTrue("No more entries left after row " + rs.getRow(), (i + j == 23 || rs.next()));
}
}
/**
* Explicit pagination test for PIVOT.
* Checks that the paging properly consumes the necessary amount of aggregations and the
* page size affects the result not the intermediate query.
*/
public void testPivotPaging() throws Exception {
public void testPivotPaging() throws IOException, SQLException {
addPivotData();
try (Connection c = esJdbc();
Statement s = c.createStatement()) {
try (Connection c = esJdbc(); Statement s = c.createStatement()) {
String query = "SELECT * FROM "
+ "(SELECT item, amount, location FROM test_pivot)"
+ " PIVOT (AVG(amount) FOR location IN ( 'AF', 'AS', 'EU', 'NA', 'SA', 'AQ', 'AU') )";
+ "(SELECT item, amount, location FROM test_pivot)"
+ " PIVOT (AVG(amount) FOR location IN ( 'AF', 'AS', 'EU', 'NA', 'SA', 'AQ', 'AU') )";
// set size smaller than an agg page
s.setFetchSize(3);
try (ResultSet rs = s.executeQuery(query)) {
@ -226,7 +217,7 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
}
assertFalse(rs.next());
}
// now try with a larger fetch size (8 * 2 + something) - should be 2
s.setFetchSize(20);
try (ResultSet rs = s.executeQuery(query)) {
@ -239,20 +230,17 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
assertFalse(rs.next());
}
}
assertNoSearchContexts();
}
public void testPivotPagingWithLimit() throws Exception {
public void testPivotPagingWithLimit() throws IOException, SQLException {
addPivotData();
try (Connection c = esJdbc();
Statement s = c.createStatement()) {
try (Connection c = esJdbc(); Statement s = c.createStatement()) {
// run a query with a limit that is not a multiple of the fetch size
String query = "SELECT * FROM "
+ "(SELECT item, amount, location FROM test_pivot)"
+ " PIVOT (AVG(amount) FOR location IN ( 'EU', 'NA' ) ) LIMIT 5";
+ "(SELECT item, amount, location FROM test_pivot)"
+ " PIVOT (AVG(amount) FOR location IN ( 'EU', 'NA' ) ) LIMIT 5";
// set size smaller than an agg page
s.setFetchSize(20);
try (ResultSet rs = s.executeQuery(query)) {
@ -268,20 +256,24 @@ public class FetchSizeTestCase extends JdbcIntegrationTestCase {
assertFalse("LIMIT should be reached", rs.next());
}
}
assertNoSearchContexts();
}
private void addPivotData() throws Exception {
private void addPivotData() throws IOException {
Request request = new Request("PUT", "/test_pivot/_bulk");
request.addParameter("refresh", "true");
StringBuilder bulk = new StringBuilder();
String[] continent = new String[] { "AF", "AS", "EU", "NA", "SA", "AQ", "AU" };
for (int i = 0; i <= 100; i++) {
bulk.append("{\"index\":{}}\n");
bulk.append("{\"item\":").append(i % 10)
.append(", \"entry\":").append(i)
.append(", \"amount\" : ").append(randomInt(999))
.append(", \"location\" : \"").append(continent[i % (continent.length)]).append("\"")
bulk.append("{\"item\":")
.append(i % 10)
.append(", \"entry\":")
.append(i)
.append(", \"amount\" : ")
.append(randomInt(999))
.append(", \"location\" : \"")
.append(continent[i % (continent.length)])
.append("\"")
.append("}\n");
}
request.setJsonEntity(bulk.toString());

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.sql.qa.jdbc;
import org.elasticsearch.client.Request;
import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
@ -15,16 +16,15 @@ import static org.hamcrest.Matchers.startsWith;
/**
* Tests for exceptions and their messages.
*/
public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elasticsearch.xpack.sql.qa.ErrorsTestCase {
@Override
public void testSelectInvalidSql() throws Exception {
public abstract class JdbcErrorsTestCase extends JdbcIntegrationTestCase {
public void testSelectInvalidSql() throws SQLException {
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * FRO").executeQuery());
assertEquals("Found 1 problem\nline 1:8: Cannot determine columns for [*]", e.getMessage());
}
}
@Override
public void testSelectFromMissingIndex() throws SQLException {
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT * FROM test").executeQuery());
@ -32,16 +32,14 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast
}
}
@Override
public void testSelectColumnFromMissingIndex() throws Exception {
public void testSelectColumnFromMissingIndex() throws SQLException {
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT abc FROM test").executeQuery());
assertEquals("Found 1 problem\nline 1:17: Unknown index [test]", e.getMessage());
}
}
@Override
public void testSelectFromEmptyIndex() throws Exception {
public void testSelectFromEmptyIndex() throws IOException, SQLException {
// Create an index without any types
Request request = new Request("PUT", "/test");
request.setJsonEntity("{}");
@ -53,8 +51,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast
}
}
@Override
public void testSelectColumnFromEmptyIndex() throws Exception {
public void testSelectColumnFromEmptyIndex() throws IOException, SQLException {
Request request = new Request("PUT", "/test");
request.setJsonEntity("{}");
client().performRequest(request);
@ -65,8 +62,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast
}
}
@Override
public void testSelectMissingField() throws Exception {
public void testSelectMissingField() throws IOException, SQLException {
index("test", body -> body.field("test", "test"));
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT missing FROM test").executeQuery());
@ -74,8 +70,7 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast
}
}
@Override
public void testSelectMissingFunction() throws Exception {
public void testSelectMissingFunction() throws IOException, SQLException {
index("test", body -> body.field("foo", 1));
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT missing(foo) FROM test").executeQuery());
@ -83,64 +78,65 @@ public class ErrorsTestCase extends JdbcIntegrationTestCase implements org.elast
}
}
@Override
public void testSelectProjectScoreInAggContext() throws Exception {
public void testSelectProjectScoreInAggContext() throws IOException, SQLException {
index("test", body -> body.field("foo", 1));
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () ->
c.prepareStatement("SELECT foo, SCORE(), COUNT(*) FROM test GROUP BY foo").executeQuery());
SQLException e = expectThrows(
SQLException.class,
() -> c.prepareStatement("SELECT foo, SCORE(), COUNT(*) FROM test GROUP BY foo").executeQuery()
);
assertEquals("Found 1 problem\nline 1:13: Cannot use non-grouped column [SCORE()], expected [foo]", e.getMessage());
}
}
@Override
public void testSelectOrderByScoreInAggContext() throws Exception {
public void testSelectOrderByScoreInAggContext() throws IOException, SQLException {
index("test", body -> body.field("foo", 1));
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () ->
c.prepareStatement("SELECT foo, COUNT(*) FROM test GROUP BY foo ORDER BY SCORE()").executeQuery());
SQLException e = expectThrows(
SQLException.class,
() -> c.prepareStatement("SELECT foo, COUNT(*) FROM test GROUP BY foo ORDER BY SCORE()").executeQuery()
);
assertEquals(
"Found 1 problem\nline 1:54: Cannot order by non-grouped column [SCORE()], expected [foo] or an aggregate function",
e.getMessage());
"Found 1 problem\nline 1:54: Cannot order by non-grouped column [SCORE()], expected [foo] or an aggregate function",
e.getMessage()
);
}
}
@Override
public void testSelectGroupByScore() throws Exception {
public void testSelectGroupByScore() throws IOException, SQLException {
index("test", body -> body.field("foo", 1));
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () ->
c.prepareStatement("SELECT COUNT(*) FROM test GROUP BY SCORE()").executeQuery());
SQLException e = expectThrows(
SQLException.class,
() -> c.prepareStatement("SELECT COUNT(*) FROM test GROUP BY SCORE()").executeQuery()
);
assertEquals("Found 1 problem\nline 1:36: Cannot use [SCORE()] for grouping", e.getMessage());
}
}
@Override
public void testSelectScoreSubField() throws Exception {
public void testSelectScoreSubField() throws IOException, SQLException {
index("test", body -> body.field("foo", 1));
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () ->
c.prepareStatement("SELECT SCORE().bar FROM test").executeQuery());
SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT SCORE().bar FROM test").executeQuery());
assertThat(e.getMessage(), startsWith("line 1:15: extraneous input '.' expecting {<EOF>, ','"));
}
}
@Override
public void testSelectScoreInScalar() throws Exception {
public void testSelectScoreInScalar() throws IOException, SQLException {
index("test", body -> body.field("foo", 1));
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () ->
c.prepareStatement("SELECT SIN(SCORE()) FROM test").executeQuery());
SQLException e = expectThrows(SQLException.class, () -> c.prepareStatement("SELECT SIN(SCORE()) FROM test").executeQuery());
assertThat(e.getMessage(), startsWith("Found 1 problem\nline 1:12: [SCORE()] cannot be an argument to a function"));
}
}
@Override
public void testHardLimitForSortOnAggregate() throws Exception {
public void testHardLimitForSortOnAggregate() throws IOException, SQLException {
index("test", body -> body.field("a", 1).field("b", 2));
try (Connection c = esJdbc()) {
SQLException e = expectThrows(SQLException.class, () ->
c.prepareStatement("SELECT max(a) max FROM test GROUP BY b ORDER BY max LIMIT 12000").executeQuery());
SQLException e = expectThrows(
SQLException.class,
() -> c.prepareStatement("SELECT max(a) max FROM test GROUP BY b ORDER BY max LIMIT 12000").executeQuery()
);
assertEquals("The maximum LIMIT for aggregate sorting is [10000], received [12000]", e.getMessage());
}
}

View File

@ -0,0 +1,177 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.jdbc;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.sql.jdbc.EsDataSource;
import org.junit.After;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
 * Base class for the JDBC integration tests: provides connections to the REST test
 * cluster (via {@link java.sql.DriverManager} or {@link EsDataSource}), indexing/deletion
 * helpers, and an after-test check that no search contexts were leaked.
 */
public abstract class JdbcIntegrationTestCase extends ESRestTestCase {

    // URL scheme prefix understood by the Elasticsearch JDBC driver.
    public static final String JDBC_ES_URL_PREFIX = "jdbc:es://";

    @After
    public void checkSearchContent() throws IOException {
        // Some context might linger due to fire and forget nature of scroll cleanup
        assertNoSearchContexts();
    }

    /**
     * Read an address for Elasticsearch suitable for the JDBC driver from the system properties.
     */
    public static String elasticsearchAddress() {
        String cluster = System.getProperty("tests.rest.cluster");
        // JDBC only supports a single node at a time so we just give it one.
        return cluster.split(",")[0];
        /* This doesn't include "jdbc:es://" because we want the example in
         * esJdbc to be obvious and because we want to use getProtocol to add
         * https if we are running against https. */
    }

    /** Opens a JDBC connection to the test cluster using the default {@link #connectionProperties()}. */
    public Connection esJdbc() throws SQLException {
        return esJdbc(connectionProperties());
    }

    /** Opens a JDBC connection to the test cluster using the given properties. */
    public Connection esJdbc(Properties props) throws SQLException {
        return createConnection(props);
    }

    /**
     * Creates the connection, randomly picking either the {@link DriverManager} or the
     * {@link EsDataSource} code path so both are exercised across test runs.
     */
    protected Connection createConnection(Properties connectionProperties) throws SQLException {
        String elasticsearchAddress = getProtocol() + "://" + elasticsearchAddress();
        String address = JDBC_ES_URL_PREFIX + elasticsearchAddress;
        Connection connection;
        if (randomBoolean()) {
            connection = DriverManager.getConnection(address, connectionProperties);
        } else {
            EsDataSource dataSource = new EsDataSource();
            dataSource.setUrl(address);
            dataSource.setProperties(connectionProperties);
            connection = dataSource.getConnection();
        }
        assertNotNull("The timezone should be specified", connectionProperties.getProperty("timezone"));
        return connection;
    }

    //
    // methods below are used inside the documentation only
    //
    protected Connection useDriverManager() throws SQLException {
        String elasticsearchAddress = getProtocol() + "://" + elasticsearchAddress();
        // tag::connect-dm
        String address = "jdbc:es://" + elasticsearchAddress; // <1>
        Properties connectionProperties = connectionProperties(); // <2>
        Connection connection =
            DriverManager.getConnection(address, connectionProperties);
        // end::connect-dm
        assertNotNull("The timezone should be specified", connectionProperties.getProperty("timezone"));
        return connection;
    }

    protected Connection useDataSource() throws SQLException {
        String elasticsearchAddress = getProtocol() + "://" + elasticsearchAddress();
        // tag::connect-ds
        EsDataSource dataSource = new EsDataSource();
        String address = "jdbc:es://" + elasticsearchAddress; // <1>
        dataSource.setUrl(address);
        Properties connectionProperties = connectionProperties(); // <2>
        dataSource.setProperties(connectionProperties);
        Connection connection = dataSource.getConnection();
        // end::connect-ds
        assertNotNull("The timezone should be specified", connectionProperties.getProperty("timezone"));
        return connection;
    }

    /** Indexes a single document with id {@code "1"} into {@code index}, refreshing immediately. */
    public static void index(String index, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
        index(index, "1", body);
    }

    /** Indexes a single document with the given id, the body being built by {@code body}; refreshes immediately. */
    public static void index(String index, String documentId, CheckedConsumer<XContentBuilder, IOException> body) throws IOException {
        Request request = new Request("PUT", "/" + index + "/_doc/" + documentId);
        request.addParameter("refresh", "true");
        XContentBuilder builder = JsonXContent.contentBuilder().startObject();
        body.accept(builder);
        builder.endObject();
        request.setJsonEntity(Strings.toString(builder));
        client().performRequest(request);
    }

    /** Deletes the given document, refreshing immediately. */
    public static void delete(String index, String documentId) throws IOException {
        Request request = new Request("DELETE", "/" + index + "/_doc/" + documentId);
        request.addParameter("refresh", "true");
        client().performRequest(request);
    }

    /**
     * The properties used to build the connection.
     */
    protected Properties connectionProperties() {
        Properties connectionProperties = new Properties();
        connectionProperties.put(JdbcTestUtils.JDBC_TIMEZONE, randomKnownTimeZone());
        // in the tests, don't be lenient towards multi values
        connectionProperties.put("field.multi.value.leniency", "false");
        return connectionProperties;
    }

    public static String randomKnownTimeZone() {
        // We use system default timezone for the connection that is selected randomly by TestRuleSetupAndRestoreClassEnv
        // from all available JDK timezones. While Joda and JDK are generally in sync, some timezones might not be known
        // to the current version of Joda and in this case the test might fail. To avoid that, we specify a timezone
        // known for both Joda and JDK
        Set<String> timeZones = new HashSet<>(JODA_TIMEZONE_IDS);
        timeZones.retainAll(JAVA_TIMEZONE_IDS);
        List<String> ids = new ArrayList<>(timeZones);
        Collections.sort(ids);
        return randomFrom(ids);
    }

    /** Fetches the cluster-wide search stats ({@code GET /_stats/search}) as a map. */
    private static Map<String, Object> searchStats() throws IOException {
        Response response = client().performRequest(new Request("GET", "/_stats/search"));
        try (InputStream content = response.getEntity().getContent()) {
            return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false);
        }
    }

    /** Digs the {@code open_contexts} counter for {@code index} out of the stats map. */
    @SuppressWarnings("unchecked")
    private static int getOpenContexts(Map<String, Object> stats, String index) {
        stats = (Map<String, Object>) stats.get("indices");
        stats = (Map<String, Object>) stats.get(index);
        stats = (Map<String, Object>) stats.get("total");
        stats = (Map<String, Object>) stats.get("search");
        return (Integer) stats.get("open_contexts");
    }

    /** Asserts that no non-internal index has any open search contexts (i.e. no leaked scrolls). */
    static void assertNoSearchContexts() throws IOException {
        Map<String, Object> stats = searchStats();
        @SuppressWarnings("unchecked")
        Map<String, Object> indicesStats = (Map<String, Object>) stats.get("indices");
        for (String index : indicesStats.keySet()) {
            if (index.startsWith(".") == false) { // We are not interested in internal indices
                assertEquals(index + " should have no search contexts", 0, getOpenContexts(stats, index));
            }
        }
    }
}

View File

@ -11,7 +11,7 @@ import java.sql.SQLException;
import static org.hamcrest.Matchers.startsWith;
public class JdbcNoSqlTestCase extends JdbcIntegrationTestCase {
public abstract class JdbcNoSqlTestCase extends JdbcIntegrationTestCase {
public void testJdbcExceptionMessage() throws SQLException {
try (Connection c = esJdbc()) {

View File

@ -0,0 +1,60 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.jdbc;
import org.elasticsearch.xpack.sql.proto.StringUtils;
import java.sql.Date;
import java.sql.Time;
import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Calendar;
/**
 * Static helpers shared by the JDBC integration tests for converting epoch-millisecond
 * values into {@code java.sql} date/time types and formatted datetime strings.
 */
final class JdbcTestUtils {

    private JdbcTestUtils() {}

    // UTC zone id ("Z").
    static final ZoneId UTC = ZoneId.of("Z");
    // Name of the JDBC connection property carrying the timezone.
    static final String JDBC_TIMEZONE = "timezone";
    // The epoch day; used as the date component when building Time values in asTime().
    static final LocalDate EPOCH = LocalDate.of(1970, 1, 1);

    /** Renders the given epoch millis as a datetime string in the given zone. */
    static String of(long millis, String zoneId) {
        return StringUtils.toString(ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.of(zoneId)));
    }

    /** Returns a {@link Date} at the start (in {@code zoneId}) of the day containing {@code millis}. */
    static Date asDate(long millis, ZoneId zoneId) {
        return new java.sql.Date(
            ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), zoneId).toLocalDate().atStartOfDay(zoneId).toInstant().toEpochMilli()
        );
    }

    /** Returns a {@link Time} holding the time-of-day (in {@code zoneId}) of {@code millis}, anchored on {@link #EPOCH}. */
    static Time asTime(long millis, ZoneId zoneId) {
        return new Time(
            ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), zoneId)
                .toLocalTime()
                .atDate(JdbcTestUtils.EPOCH)
                .atZone(zoneId)
                .toInstant()
                .toEpochMilli()
        );
    }

    /**
     * Reinterprets {@code value} from the calendar's timezone as the same wall-clock
     * datetime in UTC; returns {@code value} unchanged when {@code cal} is null.
     */
    static long convertFromCalendarToUTC(long value, Calendar cal) {
        if (cal == null) {
            return value;
        }
        // Clone so the caller's Calendar is not mutated.
        Calendar c = (Calendar) cal.clone();
        c.setTimeInMillis(value);
        ZonedDateTime convertedDateTime = ZonedDateTime.ofInstant(c.toInstant(), c.getTimeZone().toZoneId())
            .withZoneSameLocal(ZoneOffset.UTC);
        return convertedDateTime.toInstant().toEpochMilli();
    }
}

View File

@ -27,14 +27,10 @@ import java.util.Calendar;
import java.util.Locale;
import java.util.StringJoiner;
import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.UTC;
import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.asDate;
import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.asTime;
import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.convertFromCalendarToUTC;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.startsWith;
public class PreparedStatementTestCase extends JdbcIntegrationTestCase {
public abstract class PreparedStatementTestCase extends JdbcIntegrationTestCase {
public void testSupportedTypes() throws SQLException {
String stringVal = randomAlphaOfLength(randomIntBetween(0, 1000));
@ -49,13 +45,19 @@ public class PreparedStatementTestCase extends JdbcIntegrationTestCase {
long millis = randomNonNegativeLong();
Calendar calendarVal = Calendar.getInstance(randomTimeZone(), Locale.ROOT);
Timestamp timestampVal = new Timestamp(millis);
Timestamp timestampValWithCal = new Timestamp(convertFromCalendarToUTC(timestampVal.getTime(), calendarVal));
Date dateVal = asDate(millis, UTC);
Date dateValWithCal = asDate(convertFromCalendarToUTC(dateVal.getTime(), calendarVal), UTC);
Time timeVal = asTime(millis, UTC);
Time timeValWithCal = asTime(convertFromCalendarToUTC(timeVal.getTime(), calendarVal), UTC);
Timestamp timestampValWithCal = new Timestamp(JdbcTestUtils.convertFromCalendarToUTC(timestampVal.getTime(), calendarVal));
Date dateVal = JdbcTestUtils.asDate(millis, JdbcTestUtils.UTC);
Date dateValWithCal = JdbcTestUtils.asDate(
JdbcTestUtils.convertFromCalendarToUTC(dateVal.getTime(), calendarVal),
JdbcTestUtils.UTC
);
Time timeVal = JdbcTestUtils.asTime(millis, JdbcTestUtils.UTC);
Time timeValWithCal = JdbcTestUtils.asTime(
JdbcTestUtils.convertFromCalendarToUTC(timeVal.getTime(), calendarVal),
JdbcTestUtils.UTC
);
java.util.Date utilDateVal = new java.util.Date(millis);
LocalDateTime localDateTimeVal = LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), UTC);
LocalDateTime localDateTimeVal = LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), JdbcTestUtils.UTC);
try (Connection connection = esJdbc()) {
StringJoiner sql = new StringJoiner(",", "SELECT ", "");
@ -140,10 +142,13 @@ public class PreparedStatementTestCase extends JdbcIntegrationTestCase {
setupIndexForDateTimeTests(randomMillis);
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT id, birth_date FROM emps WHERE birth_date::date = ? " +
"ORDER BY id")) {
try (
PreparedStatement statement = connection.prepareStatement(
"SELECT id, birth_date FROM emps WHERE birth_date::date = ? " + "ORDER BY id"
)
) {
statement.setDate(1, new Date(asDate(randomMillis, UTC).getTime()));
statement.setDate(1, new Date(JdbcTestUtils.asDate(randomMillis, JdbcTestUtils.UTC).getTime()));
try (ResultSet results = statement.executeQuery()) {
for (int i = 1; i <= 3; i++) {
assertTrue(results.next());
@ -162,7 +167,7 @@ public class PreparedStatementTestCase extends JdbcIntegrationTestCase {
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement("SELECT id, birth_date FROM emps WHERE birth_date::time = ?")) {
Time time = JdbcTestUtils.asTime(randomMillis, UTC);
Time time = JdbcTestUtils.asTime(randomMillis, JdbcTestUtils.UTC);
statement.setObject(1, time);
try (ResultSet results = statement.executeQuery()) {
assertTrue(results.next());
@ -184,7 +189,7 @@ public class PreparedStatementTestCase extends JdbcIntegrationTestCase {
}
}
public void testUnsupportedParameterUse() throws Exception {
public void testUnsupportedParameterUse() throws IOException, SQLException {
index("library", builder -> {
builder.field("name", "Don Quixote");
builder.field("page_count", 1072);
@ -202,7 +207,7 @@ public class PreparedStatementTestCase extends JdbcIntegrationTestCase {
}
}
public void testTooMayParameters() throws Exception {
public void testTooMayParameters() throws IOException, SQLException {
index("library", builder -> {
builder.field("name", "Don Quixote");
builder.field("page_count", 1072);
@ -221,10 +226,9 @@ public class PreparedStatementTestCase extends JdbcIntegrationTestCase {
}
}
public void testStringEscaping() throws Exception {
public void testStringEscaping() throws SQLException {
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement(
"SELECT ?, ?, ?, ?")) {
try (PreparedStatement statement = connection.prepareStatement("SELECT ?, ?, ?, ?")) {
statement.setString(1, "foo --");
statement.setString(2, "/* foo */");
statement.setString(3, "\"foo");
@ -246,10 +250,9 @@ public class PreparedStatementTestCase extends JdbcIntegrationTestCase {
}
}
public void testCommentsHandling() throws Exception {
public void testCommentsHandling() throws SQLException {
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement(
"SELECT ?, /* ?, */ ? -- ?")) {
try (PreparedStatement statement = connection.prepareStatement("SELECT ?, /* ?, */ ? -- ?")) {
assertEquals(2, statement.getParameterMetaData().getParameterCount());
statement.setString(1, "foo");
statement.setString(2, "bar");
@ -265,7 +268,7 @@ public class PreparedStatementTestCase extends JdbcIntegrationTestCase {
}
}
public void testSingleParameterMultipleTypes() throws Exception {
public void testSingleParameterMultipleTypes() throws SQLException {
String stringVal = randomAlphaOfLength(randomIntBetween(0, 1000));
int intVal = randomInt();
long longVal = randomLong();
@ -300,7 +303,7 @@ public class PreparedStatementTestCase extends JdbcIntegrationTestCase {
}
}
private Tuple<Integer, Object> execute(PreparedStatement statement) throws Exception {
private Tuple<Integer, Object> execute(PreparedStatement statement) throws SQLException {
try (ResultSet results = statement.executeQuery()) {
ResultSetMetaData resultSetMetaData = results.getMetaData();
assertTrue(results.next());

View File

@ -8,34 +8,43 @@ package org.elasticsearch.xpack.sql.qa.jdbc;
import org.elasticsearch.common.CheckedConsumer;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
public class ResultSetMetaDataTestCase extends JdbcIntegrationTestCase {
private final String[] fieldsNames = new String[] {"test_byte", "test_integer", "test_long", "test_short",
"test_double", "test_float", "test_keyword", "test_boolean", "test_date"};
public void testValidGetObjectCalls() throws Exception {
public abstract class ResultSetMetaDataTestCase extends JdbcIntegrationTestCase {
private final String[] fieldsNames = new String[] {
"test_byte",
"test_integer",
"test_long",
"test_short",
"test_double",
"test_float",
"test_keyword",
"test_boolean",
"test_date" };
public void testValidGetObjectCalls() throws IOException, SQLException {
ResultSetTestCase.createIndex("test");
ResultSetTestCase.updateMapping("test", builder -> {
for(String field : fieldsNames) {
for (String field : fieldsNames) {
builder.startObject(field).field("type", field.substring(5)).endObject();
}
});
String q = "SELECT test_byte, test_integer, test_long, test_short, test_double, test_float, test_keyword, "
+ "test_boolean, test_date FROM test";
doWithQuery(q, (r) -> assertColumnNamesAndLabels(r.getMetaData(), fieldsNames));
+ "test_boolean, test_date FROM test";
doWithQuery(q, r -> assertColumnNamesAndLabels(r.getMetaData(), fieldsNames));
q = "SELECT test_byte AS b, test_integer AS i, test_long AS l, test_short AS s, test_double AS d, test_float AS f, "
+ "test_keyword AS k, test_boolean AS bool, test_date AS dt FROM test";
doWithQuery(q, (r) -> assertColumnNamesAndLabels(r.getMetaData(), new String[] {"b", "i", "l", "s", "d", "f", "k", "bool", "dt"}));
+ "test_keyword AS k, test_boolean AS bool, test_date AS dt FROM test";
doWithQuery(q, r -> assertColumnNamesAndLabels(r.getMetaData(), new String[] { "b", "i", "l", "s", "d", "f", "k", "bool", "dt" }));
}
private void doWithQuery(String query, CheckedConsumer<ResultSet, SQLException> consumer) throws SQLException {
try (Connection connection = esJdbc()) {
try (PreparedStatement statement = connection.prepareStatement(query)) {
@ -46,11 +55,11 @@ public class ResultSetMetaDataTestCase extends JdbcIntegrationTestCase {
}
}
}
private void assertColumnNamesAndLabels(ResultSetMetaData metadata, String[] names) throws SQLException {
for(int i = 0; i < fieldsNames.length; i++) {
assertEquals(names[i], metadata.getColumnName(i + 1));
assertEquals(names[i], metadata.getColumnLabel(i + 1));
private void assertColumnNamesAndLabels(ResultSetMetaData metaData, String[] names) throws SQLException {
for (int i = 0; i < fieldsNames.length; i++) {
assertEquals(names[i], metaData.getColumnName(i + 1));
assertEquals(names[i], metaData.getColumnLabel(i + 1));
}
}
}

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.sql.qa.jdbc;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
@ -12,8 +13,9 @@ import java.sql.Statement;
import static org.hamcrest.Matchers.containsString;
public class SimpleExampleTestCase extends JdbcIntegrationTestCase {
public void testSimpleExample() throws Exception {
public abstract class SimpleExampleTestCase extends JdbcIntegrationTestCase {
public void testSimpleExample() throws SQLException, IOException {
index("library", builder -> {
builder.field("name", "Don Quixote");
builder.field("page_count", 1072);
@ -22,10 +24,10 @@ public class SimpleExampleTestCase extends JdbcIntegrationTestCase {
// tag::simple_example
try (Statement statement = connection.createStatement();
ResultSet results = statement.executeQuery(
" SELECT name, page_count"
+ " FROM library"
" SELECT name, page_count"
+ " FROM library"
+ " ORDER BY page_count DESC"
+ " LIMIT 1")) {
+ " LIMIT 1")) {
assertTrue(results.next());
assertEquals("Don Quixote", results.getString(1));
assertEquals(1072, results.getInt(2));

View File

@ -6,6 +6,6 @@
/**
* Support for integration tests for the Elasticsearch SQL JDBC client
* and integration tests shared between multiple qa projects.
* and integration tests shared between multiple qa projects.
*/
package org.elasticsearch.xpack.sql.qa.jdbc;

View File

@ -1,30 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.qa.security;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.xpack.sql.qa.jdbc.ErrorsTestCase;
import java.util.Properties;
public class JdbcErrorsIT extends ErrorsTestCase {
@Override
protected Settings restClientSettings() {
return RestSqlIT.securitySettings();
}
@Override
protected String getProtocol() {
return RestSqlIT.SSL_ENABLED ? "https" : "http";
}
@Override
protected Properties connectionProperties() {
Properties properties = super.connectionProperties();
properties.putAll(JdbcSecurityIT.adminProperties());
return properties;
}
}

View File

@ -0,0 +1,115 @@
description = 'Integration tests for SQL'
apply plugin: 'elasticsearch.build'
// the main files are actually test files, so use the appropriate forbidden api sigs
tasks.named('forbiddenApisMain').configure {
replaceSignatureFiles 'es-all-signatures', 'es-test-signatures'
}
dependencies {
compile project(":test:framework")
// JDBC testing dependencies
compile project(path: xpackModule('sql:jdbc'))
compile "net.sourceforge.csvjdbc:csvjdbc:${csvjdbcVersion}"
// CLI testing dependencies
compile project(path: xpackModule('sql:sql-cli'))
// H2GIS testing dependencies
compile("org.orbisgis:h2gis:${h2gisVersion}") {
exclude group: "org.locationtech.jts"
}
// select just the parts of JLine that are needed
compile("org.jline:jline-terminal-jna:${jlineVersion}") {
exclude group: "net.java.dev.jna"
}
compile "org.jline:jline-terminal:${jlineVersion}"
compile "org.jline:jline-reader:${jlineVersion}"
compile "org.jline:jline-style:${jlineVersion}"
testRuntime "org.elasticsearch:jna:${versions.jna}"
}
/* disable unit tests because these are all integration tests used
* other qa projects. */
test.enabled = false
dependencyLicenses.enabled = false
dependenciesInfo.enabled = false
// just a test fixture: we aren't using this jars in releases and H2GIS requires disabling a lot of checks
thirdPartyAudit.enabled = false
subprojects {
if (subprojects.isEmpty()) {
// leaf project
apply plugin: 'elasticsearch.standalone-rest-test'
} else {
apply plugin: 'elasticsearch.build'
}
dependencies {
configurations.testRuntimeClasspath {
resolutionStrategy.force "org.slf4j:slf4j-api:1.7.25"
}
configurations.testRuntime {
// This is also required to make resolveAllDependencies work
resolutionStrategy.force "org.slf4j:slf4j-api:1.7.25"
}
/* Since we're a standalone rest test we actually get transitive
* dependencies but we don't really want them because they cause
* all kinds of trouble with the jar hell checks. So we suppress
* them explicitly for non-es projects. */
testCompile(xpackProject('plugin:sql:qa:server')) {
transitive = false
}
testCompile project(":test:framework")
// JDBC testing dependencies
testRuntime "net.sourceforge.csvjdbc:csvjdbc:${csvjdbcVersion}"
testRuntime "com.h2database:h2:${h2Version}"
// H2GIS testing dependencies
testRuntime("org.orbisgis:h2gis:${h2gisVersion}") {
exclude group: "org.locationtech.jts"
exclude group: "com.fasterxml.jackson.core"
}
testRuntime project(path: xpackModule('sql:jdbc'))
testRuntime xpackProject('plugin:sql:sql-client')
// CLI testing dependencies
testRuntime project(path: xpackModule('sql:sql-cli'))
testRuntime(xpackProject('plugin:sql:sql-action')) {
transitive = false
}
testRuntime("org.jline:jline-terminal-jna:${jlineVersion}") {
exclude group: "net.java.dev.jna"
}
testRuntime "org.jline:jline-terminal:${jlineVersion}"
testRuntime "org.jline:jline-reader:${jlineVersion}"
testRuntime "org.jline:jline-style:${jlineVersion}"
testRuntime "org.elasticsearch:jna:${versions.jna}"
// spatial dependency
testRuntime project(path: xpackModule('spatial'))
}
if (project.name != 'security') {
// The security project just configures its subprojects
apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.rest-test'
testClusters.integTest {
testDistribution = 'DEFAULT'
setting 'xpack.ml.enabled', 'false'
setting 'xpack.watcher.enabled', 'false'
}
}
}

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.sql.qa.multi_node;
import org.elasticsearch.xpack.sql.qa.cli.SelectTestCase;
public class CliSelectIT extends SelectTestCase {
}
public class CliSelectIT extends SelectTestCase {}

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.sql.qa.multi_node;
import org.elasticsearch.xpack.sql.qa.cli.ShowTestCase;
public class CliShowIT extends ShowTestCase {
}
public class CliShowIT extends ShowTestCase {}

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.sql.qa.multi_node;
import org.elasticsearch.xpack.sql.qa.jdbc.DatabaseMetaDataTestCase;
public class JdbcDatabaseMetaDataIT extends DatabaseMetaDataTestCase {
}
public class JdbcDatabaseMetaDataIT extends DatabaseMetaDataTestCase {}

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.sql.qa.multi_node;
import org.elasticsearch.xpack.sql.qa.jdbc.ShowTablesTestCase;
public class JdbcShowTablesIT extends ShowTablesTestCase {
}
public class JdbcShowTablesIT extends ShowTablesTestCase {}

View File

@ -11,5 +11,4 @@ import org.elasticsearch.xpack.sql.qa.rest.RestSqlTestCase;
* Integration test for the rest sql action. The one that speaks json directly to a
* user rather than to the JDBC driver or CLI.
*/
public class RestSqlIT extends RestSqlTestCase {
}
public class RestSqlIT extends RestSqlTestCase {}

View File

@ -66,7 +66,8 @@ public class RestSqlMultinodeIT extends ESRestTestCase {
assertNotNull("Didn't find first host among published addresses", firstHostName);
XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject();
index.startObject("settings"); {
index.startObject("settings");
{
index.field("routing.allocation.exclude._name", firstHostName);
}
index.endObject();
@ -77,7 +78,7 @@ public class RestSqlMultinodeIT extends ESRestTestCase {
int documents = between(10, 100);
createTestData(documents);
try (RestClient firstNodeClient = buildClient(restClientSettings(), new HttpHost[] {firstHost})) {
try (RestClient firstNodeClient = buildClient(restClientSettings(), new HttpHost[] { firstHost })) {
assertCount(firstNodeClient, documents);
}
}

View File

@ -8,5 +8,4 @@ package org.elasticsearch.xpack.sql.qa.multi_node;
import org.elasticsearch.xpack.sql.qa.SqlProtocolTestCase;
public class SqlProtocolIT extends SqlProtocolTestCase {
}
public class SqlProtocolIT extends SqlProtocolTestCase {}

View File

@ -4,8 +4,6 @@ dependencies {
Project mainProject = project
group = "${group}.x-pack.qa.sql.security"
configurations.create('testArtifacts')
TaskProvider testJar = tasks.register("testJar", Jar) {

View File

@ -0,0 +1,91 @@
# tag::rest
rest_minimal:
indices:
- names: test
privileges: [read, "indices:admin/get"]
- names: bort
privileges: [read, "indices:admin/get"]
# end::rest
# tag::cli_drivers
cli_or_drivers_minimal:
cluster:
- "cluster:monitor/main"
indices:
- names: test
privileges: [read, "indices:admin/get"]
- names: bort
privileges: [read, "indices:admin/get"]
# end::cli_drivers
read_nothing:
cluster:
- "cluster:monitor/main"
read_something_else:
cluster:
- "cluster:monitor/main"
indices:
- names: something_that_isnt_test
privileges: [read, "indices:admin/get"]
read_test_a:
cluster:
- "cluster:monitor/main"
indices:
- names: test
privileges: [read, "indices:admin/get"]
field_security:
grant: [a]
read_test_a_and_b:
cluster:
- "cluster:monitor/main"
indices:
- names: test
privileges: [read, "indices:admin/get"]
field_security:
grant: ["*"]
except: [c]
read_test_without_c_3:
cluster:
- "cluster:monitor/main"
indices:
- names: test
privileges: [read, "indices:admin/get"]
query: |
{
"bool": {
"must_not": [
{
"match": {
"c": 3
}
}
]
}
}
read_bort:
cluster:
- "cluster:monitor/main"
indices:
- names: bort
privileges: [read, "indices:admin/get"]
no_monitor_main:
indices:
- names: test
privileges: [read, "indices:admin/get"]
- names: bort
privileges: [read, "indices:admin/get"]
no_get_index:
cluster:
- "cluster:monitor/main"
indices:
- names: test
privileges: [monitor]
- names: bort
privileges: [monitor]

View File

@ -84,13 +84,15 @@ public class CliSecurityIT extends SqlSecurityTestCase {
public void expectScrollMatchesAdmin(String adminSql, String user, String userSql) throws Exception {
expectMatchesAdmin(adminSql, user, userSql, cli -> {
assertEquals("[?1l>[?1000l[?2004lfetch size set to [90m1[0m", cli.command("fetch size = 1"));
assertEquals("[?1l>[?1000l[?2004lfetch separator set to \"[90m -- fetch sep -- [0m\"",
cli.command("fetch separator = \" -- fetch sep -- \""));
assertEquals(
"[?1l>[?1000l[?2004lfetch separator set to \"[90m -- fetch sep -- [0m\"",
cli.command("fetch separator = \" -- fetch sep -- \"")
);
});
}
public void expectMatchesAdmin(String adminSql, String user, String userSql,
CheckedConsumer<EmbeddedCli, Exception> customizer) throws Exception {
public void expectMatchesAdmin(String adminSql, String user, String userSql, CheckedConsumer<EmbeddedCli, Exception> customizer)
throws Exception {
List<String> adminResult = new ArrayList<>();
try (EmbeddedCli cli = new EmbeddedCli(elasticsearchAddress(), true, adminSecurityConfig())) {
customizer.accept(cli);

View File

@ -82,10 +82,12 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
properties.put("ssl.truststore.pass", "keypass");
}
static void expectActionMatchesAdmin(CheckedFunction<Connection, ResultSet, SQLException> adminAction,
String user, CheckedFunction<Connection, ResultSet, SQLException> userAction) throws Exception {
try (Connection adminConnection = es(adminProperties());
Connection userConnection = es(userProperties(user))) {
static void expectActionMatchesAdmin(
CheckedFunction<Connection, ResultSet, SQLException> adminAction,
String user,
CheckedFunction<Connection, ResultSet, SQLException> userAction
) throws Exception {
try (Connection adminConnection = es(adminProperties()); Connection userConnection = es(userProperties(user))) {
assertResultSets(adminAction.apply(adminConnection), userAction.apply(userConnection));
}
}
@ -106,8 +108,8 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
assertThat(e.getMessage(), containsString(errorMessage));
}
static void expectActionThrowsUnknownColumn(String user,
CheckedConsumer<Connection, SQLException> action, String column) throws Exception {
static void expectActionThrowsUnknownColumn(String user, CheckedConsumer<Connection, SQLException> action, String column)
throws Exception {
SQLException e;
try (Connection connection = es(userProperties(user))) {
e = expectThrows(SQLException.class, () -> action.accept(connection));
@ -123,8 +125,7 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
@Override
public void queryWorksAsAdmin() throws Exception {
try (Connection h2 = LocalH2.anonymousDb();
Connection es = es(adminProperties())) {
try (Connection h2 = LocalH2.anonymousDb(); Connection es = es(adminProperties())) {
h2.createStatement().executeUpdate("CREATE TABLE test (a BIGINT, b BIGINT, c BIGINT)");
h2.createStatement().executeUpdate("INSERT INTO test (a, b, c) VALUES (1, 2, 3), (4, 5, 6)");
@ -138,29 +139,26 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
expectActionMatchesAdmin(
con -> con.createStatement().executeQuery(adminSql),
user,
con -> con.createStatement().executeQuery(userSql));
con -> con.createStatement().executeQuery(userSql)
);
}
@Override
public void expectScrollMatchesAdmin(String adminSql, String user, String userSql) throws Exception {
expectActionMatchesAdmin(
con -> {
Statement st = con.createStatement();
st.setFetchSize(1);
return st.executeQuery(adminSql);
},
user,
con -> {
Statement st = con.createStatement();
st.setFetchSize(1);
return st.executeQuery(userSql);
});
expectActionMatchesAdmin(con -> {
Statement st = con.createStatement();
st.setFetchSize(1);
return st.executeQuery(adminSql);
}, user, con -> {
Statement st = con.createStatement();
st.setFetchSize(1);
return st.executeQuery(userSql);
});
}
@Override
public void expectDescribe(Map<String, List<String>> columns, String user) throws Exception {
try (Connection h2 = LocalH2.anonymousDb();
Connection es = es(userProperties(user))) {
try (Connection h2 = LocalH2.anonymousDb(); Connection es = es(userProperties(user))) {
// h2 doesn't have the same sort of DESCRIBE that we have so we emulate it
h2.createStatement().executeUpdate("CREATE TABLE mock (column VARCHAR, type VARCHAR, mapping VARCHAR)");
if (columns.size() > 0) {
@ -222,10 +220,7 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
@Override
public void expectUnknownColumn(String user, String sql, String column) throws Exception {
expectActionThrowsUnknownColumn(
user,
con -> con.createStatement().executeQuery(sql),
column);
expectActionThrowsUnknownColumn(user, con -> con.createStatement().executeQuery(sql), column);
}
@Override
@ -236,12 +231,12 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
expectUnauthorized("cluster:monitor/main", user, () -> es(userProperties(user)).getMetaData().getDatabaseMinorVersion());
// by moving to field caps these calls do not require the monitor permission
// expectUnauthorized("cluster:monitor/main", user,
// () -> es(userProperties(user)).createStatement().executeQuery("SELECT * FROM test"));
// expectUnauthorized("cluster:monitor/main", user,
// () -> es(userProperties(user)).createStatement().executeQuery("SHOW TABLES LIKE 'test'"));
// expectUnauthorized("cluster:monitor/main", user,
// () -> es(userProperties(user)).createStatement().executeQuery("DESCRIBE test"));
// expectUnauthorized("cluster:monitor/main", user,
// () -> es(userProperties(user)).createStatement().executeQuery("SELECT * FROM test"));
// expectUnauthorized("cluster:monitor/main", user,
// () -> es(userProperties(user)).createStatement().executeQuery("SHOW TABLES LIKE 'test'"));
// expectUnauthorized("cluster:monitor/main", user,
// () -> es(userProperties(user)).createStatement().executeQuery("DESCRIBE test"));
}
private void expectUnauthorized(String action, String user, ThrowingRunnable r) {
@ -261,7 +256,8 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
expectActionMatchesAdmin(
con -> con.getMetaData().getTables("%", "%", "%t", null),
"full_access",
con -> con.getMetaData().getTables("%", "%", "%", null));
con -> con.getMetaData().getTables("%", "%", "%", null)
);
}
public void testMetaDataGetTablesWithNoAccess() throws Exception {
@ -276,7 +272,8 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
expectActionMatchesAdmin(
con -> con.getMetaData().getTables("%", "%", "bort", null),
"read_bort",
con -> con.getMetaData().getTables("%", "%", "%", null));
con -> con.getMetaData().getTables("%", "%", "%", null)
);
}
public void testMetaDataGetTablesWithInAccessibleIndex() throws Exception {
@ -285,16 +282,18 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
expectActionMatchesAdmin(
con -> con.getMetaData().getTables("%", "%", "not_created", null),
"read_bort",
con -> con.getMetaData().getTables("%", "%", "test", null));
con -> con.getMetaData().getTables("%", "%", "test", null)
);
}
public void testMetaDataGetColumnsWorksAsFullAccess() throws Exception {
createUser("full_access", "cli_or_drivers_minimal");
expectActionMatchesAdmin(
con -> con.getMetaData().getColumns(null, "%", "%t", "%"),
con -> con.getMetaData().getColumns(null, "%", "%t", "%"),
"full_access",
con -> con.getMetaData().getColumns(null, "%", "%t", "%"));
con -> con.getMetaData().getColumns(null, "%", "%t", "%")
);
}
public void testMetaDataGetColumnsWithNoAccess() throws Exception {
@ -307,18 +306,20 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
createUser("wrong_access", "read_something_else");
expectActionMatchesAdmin(
con -> con.getMetaData().getColumns(null, "%", "not_created", "%"),
con -> con.getMetaData().getColumns(null, "%", "not_created", "%"),
"wrong_access",
con -> con.getMetaData().getColumns(null, "%", "test", "%"));
con -> con.getMetaData().getColumns(null, "%", "test", "%")
);
}
public void testMetaDataGetColumnsSingleFieldGranted() throws Exception {
createUser("only_a", "read_test_a");
expectActionMatchesAdmin(
con -> con.getMetaData().getColumns(null, "%", "test", "a"),
con -> con.getMetaData().getColumns(null, "%", "test", "a"),
"only_a",
con -> con.getMetaData().getColumns(null, "%", "test", "%"));
con -> con.getMetaData().getColumns(null, "%", "test", "%")
);
}
public void testMetaDataGetColumnsSingleFieldExcepted() throws Exception {
@ -345,6 +346,7 @@ public class JdbcSecurityIT extends SqlSecurityTestCase {
expectActionMatchesAdmin(
con -> con.getMetaData().getColumns(null, "%", "test", "%"),
"no_3s",
con -> con.getMetaData().getColumns(null, "%", "test", "%"));
con -> con.getMetaData().getColumns(null, "%", "test", "%")
);
}
}

View File

@ -28,8 +28,7 @@ public class RestSqlIT extends RestSqlTestCase {
static Settings securitySettings() {
String token = basicAuthHeaderValue("test_admin", new SecureString("x-pack-test-password".toCharArray()));
Settings.Builder builder = Settings.builder()
.put(ThreadContext.PREFIX + ".Authorization", token);
Settings.Builder builder = Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token);
if (SSL_ENABLED) {
Path keyStore;
try {
@ -40,8 +39,7 @@ public class RestSqlIT extends RestSqlTestCase {
if (!Files.exists(keyStore)) {
throw new IllegalStateException("Keystore file [" + keyStore + "] does not exist.");
}
builder.put(ESRestTestCase.TRUSTSTORE_PATH, keyStore)
.put(ESRestTestCase.TRUSTSTORE_PASSWORD, "keypass");
builder.put(ESRestTestCase.TRUSTSTORE_PATH, keyStore).put(ESRestTestCase.TRUSTSTORE_PASSWORD, "keypass");
}
return builder.build();
}

View File

@ -50,13 +50,15 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
public void queryWorksAsAdmin() throws Exception {
String mode = randomMode();
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
expected.put(
"columns",
Arrays.asList(
columnInfo(mode, "a", "long", JDBCType.BIGINT, 20),
columnInfo(mode, "b", "long", JDBCType.BIGINT, 20),
columnInfo(mode, "c", "long", JDBCType.BIGINT, 20)));
expected.put("rows", Arrays.asList(
Arrays.asList(1, 2, 3),
Arrays.asList(4, 5, 6)));
columnInfo(mode, "c", "long", JDBCType.BIGINT, 20)
)
);
expected.put("rows", Arrays.asList(Arrays.asList(1, 2, 3), Arrays.asList(4, 5, 6)));
assertResponse(expected, runSql(null, mode, "SELECT * FROM test ORDER BY a"));
}
@ -70,10 +72,16 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
@Override
public void expectScrollMatchesAdmin(String adminSql, String user, String userSql) throws Exception {
String mode = randomMode();
Map<String, Object> adminResponse = runSql(null,
new StringEntity(query(adminSql).mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON), mode);
Map<String, Object> otherResponse = runSql(user,
new StringEntity(query(adminSql).mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON), mode);
Map<String, Object> adminResponse = runSql(
null,
new StringEntity(query(adminSql).mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON),
mode
);
Map<String, Object> otherResponse = runSql(
user,
new StringEntity(query(adminSql).mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON),
mode
);
String adminCursor = (String) adminResponse.remove("cursor");
String otherCursor = (String) otherResponse.remove("cursor");
@ -81,10 +89,16 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
assertNotNull(otherCursor);
assertResponse(adminResponse, otherResponse);
while (true) {
adminResponse = runSql(null, new StringEntity(cursor(adminCursor).mode(mode).toString(),
ContentType.APPLICATION_JSON), mode);
otherResponse = runSql(user, new StringEntity(cursor(otherCursor).mode(mode).toString(),
ContentType.APPLICATION_JSON), mode);
adminResponse = runSql(
null,
new StringEntity(cursor(adminCursor).mode(mode).toString(), ContentType.APPLICATION_JSON),
mode
);
otherResponse = runSql(
user,
new StringEntity(cursor(otherCursor).mode(mode).toString(), ContentType.APPLICATION_JSON),
mode
);
adminCursor = (String) adminResponse.remove("cursor");
otherCursor = (String) otherResponse.remove("cursor");
assertResponse(adminResponse, otherResponse);
@ -100,10 +114,14 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
public void expectDescribe(Map<String, List<String>> columns, String user) throws Exception {
String mode = randomMode();
Map<String, Object> expected = new HashMap<>(3);
expected.put("columns", Arrays.asList(
expected.put(
"columns",
Arrays.asList(
columnInfo(mode, "column", "keyword", JDBCType.VARCHAR, 32766),
columnInfo(mode, "type", "keyword", JDBCType.VARCHAR, 32766),
columnInfo(mode, "mapping", "keyword", JDBCType.VARCHAR, 32766)));
columnInfo(mode, "mapping", "keyword", JDBCType.VARCHAR, 32766)
)
);
List<List<String>> rows = new ArrayList<>(columns.size());
for (Map.Entry<String, List<String>> column : columns.entrySet()) {
List<String> cols = new ArrayList<>();
@ -142,10 +160,9 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
* by the time the test runs.
*/
@SuppressWarnings("unchecked")
List<List<String>> rowsNoSecurity = ((List<List<String>>) actual.get("rows"))
.stream()
.filter(ls -> ls.get(0).startsWith(".security") == false)
.collect(Collectors.toList());
List<List<String>> rowsNoSecurity = ((List<List<String>>) actual.get("rows")).stream()
.filter(ls -> ls.get(0).startsWith(".security") == false)
.collect(Collectors.toList());
actual.put("rows", rowsNoSecurity);
assertResponse(expected, actual);
}
@ -231,20 +248,28 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
createUser("full_access", "rest_minimal");
final String mode = randomMode();
Map<String, Object> adminResponse = RestActions.runSql(null,
new StringEntity(query("SELECT * FROM test").mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON), mode);
Map<String, Object> adminResponse = RestActions.runSql(
null,
new StringEntity(query("SELECT * FROM test").mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON),
mode
);
String cursor = (String) adminResponse.remove("cursor");
assertNotNull(cursor);
ResponseException e = expectThrows(ResponseException.class, () -> RestActions.runSql("full_access",
new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON), mode));
ResponseException e = expectThrows(
ResponseException.class,
() -> RestActions.runSql(
"full_access",
new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON),
mode
)
);
// TODO return a better error message for bad scrolls
assertThat(e.getMessage(), containsString("No search context found for id"));
assertEquals(404, e.getResponse().getStatusLine().getStatusCode());
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
.expect(true, SQL_ACTION_NAME, "full_access", empty())
// one scroll access denied per shard
.expect("access_denied", SQL_ACTION_NAME, "full_access", "default_native", empty(), "InternalScrollSearchRequest")
@ -253,21 +278,30 @@ public class RestSqlSecurityIT extends SqlSecurityTestCase {
protected class RestAuditLogAsserter extends AuditLogAsserter {
@Override
public AuditLogAsserter expect(String eventType, String action, String principal, String realm,
Matcher<? extends Iterable<? extends String>> indicesMatcher, String request) {
final Matcher<String> runByPrincipalMatcher = principal.equals("test_admin") ? Matchers.nullValue(String.class)
: Matchers.is("test_admin");
final Matcher<String> runByRealmMatcher = realm.equals("default_file") ? Matchers.nullValue(String.class)
: Matchers.is("default_file");
public AuditLogAsserter expect(
String eventType,
String action,
String principal,
String realm,
Matcher<? extends Iterable<? extends String>> indicesMatcher,
String request
) {
final Matcher<String> runByPrincipalMatcher = principal.equals("test_admin")
? Matchers.nullValue(String.class)
: Matchers.is("test_admin");
final Matcher<String> runByRealmMatcher = realm.equals("default_file")
? Matchers.nullValue(String.class)
: Matchers.is("default_file");
logCheckers.add(
m -> eventType.equals(m.get("event.action"))
&& action.equals(m.get("action"))
&& principal.equals(m.get("user.name"))
&& realm.equals(m.get("user.realm"))
&& runByPrincipalMatcher.matches(m.get("user.run_by.name"))
&& runByRealmMatcher.matches(m.get("user.run_by.realm"))
&& indicesMatcher.matches(m.get("indices"))
&& request.equals(m.get("request.name")));
m -> eventType.equals(m.get("event.action"))
&& action.equals(m.get("action"))
&& principal.equals(m.get("user.name"))
&& realm.equals(m.get("user.realm"))
&& runByPrincipalMatcher.matches(m.get("user.run_by.name"))
&& runByRealmMatcher.matches(m.get("user.run_by.realm"))
&& indicesMatcher.matches(m.get("indices"))
&& request.equals(m.get("request.name"))
);
return this;
}

View File

@ -58,22 +58,31 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
*/
protected interface Actions {
String minimalPermissionsForAllActions();
void queryWorksAsAdmin() throws Exception;
/**
* Assert that running some sql as a user returns the same result as running it as
* the administrator.
*/
void expectMatchesAdmin(String adminSql, String user, String userSql) throws Exception;
/**
* Same as {@link #expectMatchesAdmin(String, String, String)} but sets the scroll size
* to 1 and completely scrolls the results.
*/
void expectScrollMatchesAdmin(String adminSql, String user, String userSql) throws Exception;
void expectDescribe(Map<String, List<String>> columns, String user) throws Exception;
void expectShowTables(List<String> tables, String user) throws Exception;
void expectForbidden(String user, String sql) throws Exception;
void expectUnknownIndex(String user, String sql) throws Exception;
void expectUnknownColumn(String user, String sql, String column) throws Exception;
void checkNoMonitorMain(String user) throws Exception;
}
@ -87,23 +96,26 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
private static final Path AUDIT_LOG_FILE = lookupAuditLog();
private static final Path ROLLED_OVER_AUDIT_LOG_FILE = lookupRolledOverAuditLog();
@SuppressForbidden(reason="security doesn't work with mock filesystem")
@SuppressForbidden(reason = "security doesn't work with mock filesystem")
private static Path lookupAuditLog() {
String auditLogFileString = System.getProperty("tests.audit.logfile");
if (null == auditLogFileString) {
throw new IllegalStateException("tests.audit.logfile must be set to run this test. It is automatically "
throw new IllegalStateException(
"tests.audit.logfile must be set to run this test. It is automatically "
+ "set by gradle. If you must set it yourself then it should be the absolute path to the audit "
+ "log file generated by running x-pack with audit logging enabled.");
+ "log file generated by running x-pack with audit logging enabled."
);
}
return Paths.get(auditLogFileString);
}
@SuppressForbidden(reason="security doesn't work with mock filesystem")
@SuppressForbidden(reason = "security doesn't work with mock filesystem")
private static Path lookupRolledOverAuditLog() {
String auditLogFileString = System.getProperty("tests.audit.yesterday.logfile");
if (null == auditLogFileString) {
throw new IllegalStateException("tests.audit.yesterday.logfile must be set to run this test. It should be automatically "
+ "set by gradle.");
throw new IllegalStateException(
"tests.audit.yesterday.logfile must be set to run this test. It should be automatically " + "set by gradle."
);
}
return Paths.get(auditLogFileString);
}
@ -205,7 +217,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
@AfterClass
public static void wipeIndicesAfterTests() throws IOException {
try {
wipeAllIndices();
wipeAllIndices();
} finally {
// Clear the static state so other subclasses can reuse it later
oneTimeSetup = false;
@ -220,17 +232,14 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
public void testQueryWorksAsAdmin() throws Exception {
actions.queryWorksAsAdmin();
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.assertLogs();
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test").assertLogs();
}
public void testQueryWithFullAccess() throws Exception {
createUser("full_access", actions.minimalPermissionsForAllActions());
actions.expectMatchesAdmin("SELECT * FROM test ORDER BY a", "full_access", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("full_access", "test")
.assertLogs();
}
@ -239,8 +248,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
createUser("full_access", actions.minimalPermissionsForAllActions());
actions.expectScrollMatchesAdmin("SELECT * FROM test ORDER BY a", "full_access", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
/* Scrolling doesn't have to access the index again, at least not through sql.
* If we asserted query and scroll logs then we would see the scroll. */
.expect(true, SQL_ACTION_NAME, "test_admin", empty())
@ -255,9 +263,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
createUser("no_access", "read_nothing");
actions.expectForbidden("no_access", "SELECT * FROM test");
createAuditLogAsserter()
.expect(false, SQL_ACTION_NAME, "no_access", empty())
.assertLogs();
createAuditLogAsserter().expect(false, SQL_ACTION_NAME, "no_access", empty()).assertLogs();
}
public void testQueryWrongAccess() throws Exception {
@ -265,9 +271,9 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectUnknownIndex("wrong_access", "SELECT * FROM test");
createAuditLogAsserter()
//This user has permission to run sql queries so they are given preliminary authorization
// This user has permission to run sql queries so they are given preliminary authorization
.expect(true, SQL_ACTION_NAME, "wrong_access", empty())
//the following get index is granted too but against the no indices placeholder, as ignore_unavailable=true
// the following get index is granted too but against the no indices placeholder, as ignore_unavailable=true
.expect(true, FieldCapabilitiesAction.NAME, "wrong_access", hasItems("*", "-*"))
.assertLogs();
}
@ -276,8 +282,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
createUser("only_a", "read_test_a");
actions.expectMatchesAdmin("SELECT a FROM test ORDER BY a", "only_a", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("only_a", "test")
.assertLogs();
}
@ -286,8 +291,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
createUser("only_a", "read_test_a");
actions.expectScrollMatchesAdmin("SELECT a FROM test ORDER BY a", "only_a", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
/* Scrolling doesn't have to access the index again, at least not through sql.
* If we asserted query and scroll logs then we would see the scroll. */
.expect(true, SQL_ACTION_NAME, "test_admin", empty())
@ -308,17 +312,14 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
* query from the audit side because all the permissions checked
* out but it failed in SQL because it couldn't compile the
* query without the metadata for the missing field. */
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("only_a", "test")
.assertLogs();
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("only_a", "test").assertLogs();
}
public void testQuerySingleFieldExcepted() throws Exception {
createUser("not_c", "read_test_a_and_b");
actions.expectMatchesAdmin("SELECT a, b FROM test ORDER BY a", "not_c", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("not_c", "test")
.assertLogs();
}
@ -327,8 +328,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
createUser("not_c", "read_test_a_and_b");
actions.expectScrollMatchesAdmin("SELECT a, b FROM test ORDER BY a", "not_c", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
/* Scrolling doesn't have to access the index again, at least not through sql.
* If we asserted query and scroll logs then we would see the scroll. */
.expect(true, SQL_ACTION_NAME, "test_admin", empty())
@ -349,34 +349,28 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
* query from the audit side because all the permissions checked
* out but it failed in SQL because it couldn't compile the
* query without the metadata for the missing field. */
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("not_c", "test")
.assertLogs();
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("not_c", "test").assertLogs();
}
public void testQueryDocumentExcluded() throws Exception {
createUser("no_3s", "read_test_without_c_3");
actions.expectMatchesAdmin("SELECT * FROM test WHERE c != 3 ORDER BY a", "no_3s", "SELECT * FROM test ORDER BY a");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("no_3s", "test")
.assertLogs();
}
public void testShowTablesWorksAsAdmin() throws Exception {
actions.expectShowTables(Arrays.asList("bort", "test"), null);
createAuditLogAsserter()
.expectSqlCompositeActionGetIndex("test_admin", "bort", "test")
.assertLogs();
createAuditLogAsserter().expectSqlCompositeActionGetIndex("test_admin", "bort", "test").assertLogs();
}
public void testShowTablesWorksAsFullAccess() throws Exception {
createUser("full_access", actions.minimalPermissionsForAllActions());
actions.expectMatchesAdmin("SHOW TABLES LIKE '%t'", "full_access", "SHOW TABLES");
createAuditLogAsserter()
.expectSqlCompositeActionGetIndex("test_admin", "bort", "test")
createAuditLogAsserter().expectSqlCompositeActionGetIndex("test_admin", "bort", "test")
.expectSqlCompositeActionGetIndex("full_access", "bort", "test")
.assertLogs();
}
@ -385,17 +379,15 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
createUser("no_access", "read_nothing");
actions.expectForbidden("no_access", "SHOW TABLES");
createAuditLogAsserter()
.expect(false, SQL_ACTION_NAME, "no_access", empty())
.assertLogs();
createAuditLogAsserter().expect(false, SQL_ACTION_NAME, "no_access", empty()).assertLogs();
}
public void testShowTablesWithLimitedAccess() throws Exception {
createUser("read_bort", "read_bort");
actions.expectMatchesAdmin("SHOW TABLES LIKE 'bort'", "read_bort", "SHOW TABLES");
createAuditLogAsserter()
.expectSqlCompositeActionGetIndex("test_admin", "bort").expectSqlCompositeActionGetIndex("read_bort", "bort")
createAuditLogAsserter().expectSqlCompositeActionGetIndex("test_admin", "bort")
.expectSqlCompositeActionGetIndex("read_bort", "bort")
.assertLogs();
}
@ -403,8 +395,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
createUser("read_bort", "read_bort");
actions.expectMatchesAdmin("SHOW TABLES LIKE 'not-created'", "read_bort", "SHOW TABLES LIKE 'test'");
createAuditLogAsserter()
.expect(true, SQL_ACTION_NAME, "test_admin", empty())
createAuditLogAsserter().expect(true, SQL_ACTION_NAME, "test_admin", empty())
.expect(true, GetIndexAction.NAME, "test_admin", contains("*", "-*"))
.expect(true, SQL_ACTION_NAME, "read_bort", empty())
.expect(true, GetIndexAction.NAME, "read_bort", contains("*", "-*"))
@ -417,17 +408,14 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
expected.put("b", asList("BIGINT", "long"));
expected.put("c", asList("BIGINT", "long"));
actions.expectDescribe(expected, null);
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
.assertLogs();
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test").assertLogs();
}
public void testDescribeWorksAsFullAccess() throws Exception {
createUser("full_access", actions.minimalPermissionsForAllActions());
actions.expectMatchesAdmin("DESCRIBE test", "full_access", "DESCRIBE test");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("full_access", "test")
.assertLogs();
}
@ -436,9 +424,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
createUser("no_access", "read_nothing");
actions.expectForbidden("no_access", "DESCRIBE test");
createAuditLogAsserter()
.expect(false, SQL_ACTION_NAME, "no_access", empty())
.assertLogs();
createAuditLogAsserter().expect(false, SQL_ACTION_NAME, "no_access", empty()).assertLogs();
}
public void testDescribeWithWrongAccess() throws Exception {
@ -446,9 +432,9 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
actions.expectDescribe(Collections.emptyMap(), "wrong_access");
createAuditLogAsserter()
//This user has permission to run sql queries so they are given preliminary authorization
// This user has permission to run sql queries so they are given preliminary authorization
.expect(true, SQL_ACTION_NAME, "wrong_access", empty())
//the following get index is granted too but against the no indices placeholder, as ignore_unavailable=true
// the following get index is granted too but against the no indices placeholder, as ignore_unavailable=true
.expect(true, FieldCapabilitiesAction.NAME, "wrong_access", hasItems("*", "-*"))
.assertLogs();
}
@ -457,9 +443,7 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
createUser("only_a", "read_test_a");
actions.expectDescribe(singletonMap("a", asList("BIGINT", "long")), "only_a");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("only_a", "test")
.assertLogs();
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("only_a", "test").assertLogs();
}
public void testDescribeSingleFieldExcepted() throws Exception {
@ -469,17 +453,14 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
expected.put("a", asList("BIGINT", "long"));
expected.put("b", asList("BIGINT", "long"));
actions.expectDescribe(expected, "not_c");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("not_c", "test")
.assertLogs();
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("not_c", "test").assertLogs();
}
public void testDescribeDocumentExcluded() throws Exception {
createUser("no_3s", "read_test_without_c_3");
actions.expectMatchesAdmin("DESCRIBE test", "no_3s", "DESCRIBE test");
createAuditLogAsserter()
.expectSqlCompositeActionFieldCaps("test_admin", "test")
createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test")
.expectSqlCompositeActionFieldCaps("no_3s", "test")
.assertLogs();
}
@ -500,7 +481,8 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
protected static void createUser(String name, String role) throws IOException {
Request request = new Request("PUT", "/_security/user/" + name);
XContentBuilder user = JsonXContent.contentBuilder().prettyPrint();
user.startObject(); {
user.startObject();
{
user.field("password", "testpass");
user.field("roles", role);
}
@ -533,45 +515,58 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
return this;
}
public AuditLogAsserter expect(boolean granted, String action, String principal,
Matcher<? extends Iterable<? extends String>> indicesMatcher) {
public AuditLogAsserter expect(
boolean granted,
String action,
String principal,
Matcher<? extends Iterable<? extends String>> indicesMatcher
) {
String request;
switch (action) {
case SQL_ACTION_NAME:
request = "SqlQueryRequest";
break;
case GetIndexAction.NAME:
request = GetIndexRequest.class.getSimpleName();
break;
case SQL_ACTION_NAME:
request = "SqlQueryRequest";
break;
case GetIndexAction.NAME:
request = GetIndexRequest.class.getSimpleName();
break;
case FieldCapabilitiesAction.NAME:
request = FieldCapabilitiesRequest.class.getSimpleName();
break;
default:
throw new IllegalArgumentException("Unknown action [" + action + "]");
default:
throw new IllegalArgumentException("Unknown action [" + action + "]");
}
final String eventAction = granted ? "access_granted" : "access_denied";
final String realm = principal.equals("test_admin") ? "default_file" : "default_native";
return expect(eventAction, action, principal, realm, indicesMatcher, request);
}
public AuditLogAsserter expect(String eventAction, String action, String principal, String realm,
Matcher<? extends Iterable<? extends String>> indicesMatcher, String request) {
logCheckers.add(m ->
eventAction.equals(m.get("event.action"))
&& action.equals(m.get("action"))
&& principal.equals(m.get("user.name"))
&& realm.equals(m.get("user.realm"))
&& Matchers.nullValue(String.class).matches(m.get("user.run_by.name"))
&& Matchers.nullValue(String.class).matches(m.get("user.run_by.realm"))
&& indicesMatcher.matches(m.get("indices"))
&& request.equals(m.get("request.name"))
public AuditLogAsserter expect(
String eventAction,
String action,
String principal,
String realm,
Matcher<? extends Iterable<? extends String>> indicesMatcher,
String request
) {
logCheckers.add(
m -> eventAction.equals(m.get("event.action"))
&& action.equals(m.get("action"))
&& principal.equals(m.get("user.name"))
&& realm.equals(m.get("user.realm"))
&& Matchers.nullValue(String.class).matches(m.get("user.run_by.name"))
&& Matchers.nullValue(String.class).matches(m.get("user.run_by.realm"))
&& indicesMatcher.matches(m.get("indices"))
&& request.equals(m.get("request.name"))
);
return this;
}
public void assertLogs() throws Exception {
assertFalse("Previous test had an audit-related failure. All subsequent audit related assertions are bogus because we can't "
+ "guarantee that we fully cleaned up after the last test.", auditFailure);
assertFalse(
"Previous test had an audit-related failure. All subsequent audit related assertions are bogus because we can't "
+ "guarantee that we fully cleaned up after the last test.",
auditFailure
);
try {
// use a second variable since the `assertBusy()` block can be executed multiple times and the
// static auditFileRolledOver value can change and mess up subsequent calls of this code block
@ -624,18 +619,17 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
if (++index == 2) {
break;
}
}
else {
} else {
try {
final Map<String, Object> log = XContentHelper.convertToMap(JsonXContent.jsonXContent, line, false);
if (false == ("access_denied".equals(log.get("event.action"))
|| "access_granted".equals(log.get("event.action")))) {
|| "access_granted".equals(log.get("event.action")))) {
continue;
}
assertThat(log.containsKey("action"), is(true));
if (false == (SQL_ACTION_NAME.equals(log.get("action"))
|| GetIndexAction.NAME.equals(log.get("action"))
|| FieldCapabilitiesAction.NAME.equals(log.get("action")))) {
|| GetIndexAction.NAME.equals(log.get("action"))
|| FieldCapabilitiesAction.NAME.equals(log.get("action")))) {
// TODO we may want to extend this and the assertions to SearchAction.NAME as well
continue;
}
@ -651,8 +645,9 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
* SQL drops them from the interface. So we might have access to them, but we
* don't show them.
*/
indices = indices.stream().filter(
idx -> false == RestrictedIndicesNames.isRestricted(idx)).collect(Collectors.toList());
indices = indices.stream()
.filter(idx -> false == RestrictedIndicesNames.isRestricted(idx))
.collect(Collectors.toList());
}
}
// Use a sorted list for indices for consistent error reporting
@ -678,8 +673,14 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
notMatching.add(c);
}
if (false == notMatching.isEmpty()) {
fail("Some checkers " + notMatching + " didn't match any logs. All logs:" + logsMessage(allLogs)
+ "\nRemaining logs:" + logsMessage(logs));
fail(
"Some checkers "
+ notMatching
+ " didn't match any logs. All logs:"
+ logsMessage(allLogs)
+ "\nRemaining logs:"
+ logsMessage(logs)
);
}
if (false == logs.isEmpty()) {
fail("Not all logs matched. Unmatched logs:" + logsMessage(logs));
@ -687,8 +688,10 @@ public abstract class SqlSecurityTestCase extends ESRestTestCase {
});
} catch (AssertionError e) {
auditFailure = true;
logger.warn("Failed to find an audit log. Skipping remaining tests in this class after this the missing audit"
+ "logs could turn up later.");
logger.warn(
"Failed to find an audit log. Skipping remaining tests in this class after this the missing audit"
+ "logs could turn up later."
);
throw e;
}
}

View File

@ -44,20 +44,20 @@ public class UserFunctionIT extends ESRestTestCase {
private List<String> users;
@Rule
public TestName name = new TestName();
@Override
protected Settings restClientSettings() {
return RestSqlIT.securitySettings();
}
@Override
protected String getProtocol() {
return RestSqlIT.SSL_ENABLED ? "https" : "http";
}
@Before
private void setUpUsers() throws IOException {
int usersCount = name.getMethodName().startsWith("testSingle") ? 1 : randomIntBetween(5, 15);
int usersCount = name.getMethodName().startsWith("testSingle") ? 1 : randomIntBetween(5, 15);
users = new ArrayList<>(usersCount);
users.addAll(randomUnique(() -> randomAlphaOfLengthBetween(1, 15), usersCount));
for (String user : users) {
@ -71,53 +71,44 @@ public class UserFunctionIT extends ESRestTestCase {
deleteUser(user);
}
}
public void testSingleRandomUser() throws IOException {
String mode = randomMode().toString();
String randomUserName = users.get(0);
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("columns", Arrays.asList(columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName)));
Map<String, Object> actual = runSql(randomUserName, mode, SQL);
assertResponse(expected, actual);
}
public void testSingleRandomUserWithWhereEvaluatingTrue() throws IOException {
index("{\"test\":\"doc1\"}",
"{\"test\":\"doc2\"}",
"{\"test\":\"doc3\"}");
index("{\"test\":\"doc1\"}", "{\"test\":\"doc2\"}", "{\"test\":\"doc3\"}");
String mode = randomMode().toString();
String randomUserName = users.get(0);
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName),
Arrays.asList(randomUserName),
Arrays.asList(randomUserName)));
expected.put("columns", Arrays.asList(columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName), Arrays.asList(randomUserName), Arrays.asList(randomUserName)));
Map<String, Object> actual = runSql(randomUserName, mode, SQL + " FROM test WHERE USER()='" + randomUserName + "' LIMIT 3");
assertResponse(expected, actual);
}
public void testSingleRandomUserWithWhereEvaluatingFalse() throws IOException {
index("{\"test\":\"doc1\"}",
"{\"test\":\"doc2\"}",
"{\"test\":\"doc3\"}");
index("{\"test\":\"doc1\"}", "{\"test\":\"doc2\"}", "{\"test\":\"doc3\"}");
String mode = randomMode().toString();
String randomUserName = users.get(0);
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("columns", Arrays.asList(columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Collections.<ArrayList<String>>emptyList());
String anotherRandomUserName = randomValueOtherThan(randomUserName, () -> randomAlphaOfLengthBetween(1, 15));
Map<String, Object> actual = runSql(randomUserName, mode, SQL + " FROM test WHERE USER()='" + anotherRandomUserName + "' LIMIT 3");
assertResponse(expected, actual);
}
public void testMultipleRandomUsersAccess() throws IOException {
// run 30 queries and pick randomly each time one of the 5-15 users created previously
for (int i = 0; i < 30; i++) {
@ -125,38 +116,33 @@ public class UserFunctionIT extends ESRestTestCase {
String randomlyPickedUsername = randomFrom(users);
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("columns", Arrays.asList(columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Arrays.asList(Arrays.asList(randomlyPickedUsername)));
Map<String, Object> actual = runSql(randomlyPickedUsername, mode, SQL);
// expect the user that ran the query to be the same as the one returned by the `USER()` function
assertResponse(expected, actual);
}
}
public void testSingleUserSelectFromIndex() throws IOException {
index("{\"test\":\"doc1\"}",
"{\"test\":\"doc2\"}",
"{\"test\":\"doc3\"}");
index("{\"test\":\"doc1\"}", "{\"test\":\"doc2\"}", "{\"test\":\"doc3\"}");
String mode = randomMode().toString();
String randomUserName = users.get(0);
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName),
Arrays.asList(randomUserName),
Arrays.asList(randomUserName)));
expected.put("columns", Arrays.asList(columnInfo(mode, "USER()", "keyword", JDBCType.VARCHAR, 32766)));
expected.put("rows", Arrays.asList(Arrays.asList(randomUserName), Arrays.asList(randomUserName), Arrays.asList(randomUserName)));
Map<String, Object> actual = runSql(randomUserName, mode, "SELECT USER() FROM test LIMIT 3");
assertResponse(expected, actual);
}
private void createUser(String name, String role) throws IOException {
Request request = new Request("PUT", "/_security/user/" + name);
XContentBuilder user = JsonXContent.contentBuilder().prettyPrint();
user.startObject(); {
user.startObject();
{
user.field("password", "testpass");
user.field("roles", role);
}
@ -164,12 +150,12 @@ public class UserFunctionIT extends ESRestTestCase {
request.setJsonEntity(Strings.toString(user));
client().performRequest(request);
}
private void deleteUser(String name) throws IOException {
Request request = new Request("DELETE", "/_security/user/" + name);
client().performRequest(request);
}
private Map<String, Object> runSql(String asUser, String mode, String sql) throws IOException {
Request request = new Request("POST", SQL_QUERY_REST_ENDPOINT);
if (asUser != null) {
@ -180,7 +166,7 @@ public class UserFunctionIT extends ESRestTestCase {
request.setEntity(new StringEntity(query(sql).mode(mode).toString(), ContentType.APPLICATION_JSON));
return toMap(client().performRequest(request), mode);
}
private void assertResponse(Map<String, Object> expected, Map<String, Object> actual) {
if (false == expected.equals(actual)) {
NotEqualMessageBuilder message = new NotEqualMessageBuilder();

View File

@ -0,0 +1,9 @@
grant {
// Needed to read the audit log file
permission java.io.FilePermission "${tests.audit.logfile}", "read";
permission java.io.FilePermission "${tests.audit.yesterday.logfile}", "read";
//// Required by ssl subproject:
// Required for the net client to setup ssl rather than use global ssl.
permission java.lang.RuntimePermission "setFactory";
};

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.sql.qa.single_node;
import org.elasticsearch.xpack.sql.qa.cli.ErrorsTestCase;
public class CliErrorsIT extends ErrorsTestCase {
}
public class CliErrorsIT extends ErrorsTestCase {}

View File

@ -68,8 +68,10 @@ public class CliExplainIT extends CliIntegrationTestCase {
assertThat(readLine(), startsWith(" \\_UnresolvedRelation[test]"));
assertEquals("", readLine());
assertThat(command("EXPLAIN " + (randomBoolean() ? "" : "(PLAN ANALYZED) ") + "SELECT * FROM test WHERE i = 2"),
containsString("plan"));
assertThat(
command("EXPLAIN " + (randomBoolean() ? "" : "(PLAN ANALYZED) ") + "SELECT * FROM test WHERE i = 2"),
containsString("plan")
);
assertThat(readLine(), startsWith("----------"));
assertThat(readLine(), startsWith("Project[[test.i{f}#"));
assertThat(readLine(), startsWith("\\_Filter[test.i{f}#"));
@ -123,8 +125,7 @@ public class CliExplainIT extends CliIntegrationTestCase {
assertThat(readLine(), startsWith(" \\_UnresolvedRelation[test]"));
assertEquals("", readLine());
assertThat(command("EXPLAIN " + (randomBoolean() ? "" : "(PLAN ANALYZED) ") + "SELECT COUNT(*) FROM test"),
containsString("plan"));
assertThat(command("EXPLAIN " + (randomBoolean() ? "" : "(PLAN ANALYZED) ") + "SELECT COUNT(*) FROM test"), containsString("plan"));
assertThat(readLine(), startsWith("----------"));
assertThat(readLine(), startsWith("Aggregate[[],[COUNT(*)"));
assertThat(readLine(), startsWith("\\_EsRelation[test][i{f}#"));

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.sql.qa.single_node;
import org.elasticsearch.xpack.sql.qa.cli.FetchSizeTestCase;
public class CliFetchSizeIT extends FetchSizeTestCase {
}
public class CliFetchSizeIT extends FetchSizeTestCase {}

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.sql.qa.single_node;
import org.elasticsearch.xpack.sql.qa.cli.SelectTestCase;
public class CliSelectIT extends SelectTestCase {
}
public class CliSelectIT extends SelectTestCase {}

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.sql.qa.single_node;
import org.elasticsearch.xpack.sql.qa.cli.ShowTestCase;
public class CliShowIT extends ShowTestCase {
}
public class CliShowIT extends ShowTestCase {}

View File

@ -33,6 +33,6 @@ public class JdbcCsvSpecIT extends CsvSpecTestCase {
protected int fetchSize() {
// using a smaller fetchSize for nested documents' tests to uncover bugs
// similar to https://github.com/elastic/elasticsearch/issues/35176 quicker
return fileName.startsWith("nested") && randomBoolean() ? randomIntBetween(1,5) : super.fetchSize();
return fileName.startsWith("nested") && randomBoolean() ? randomIntBetween(1, 5) : super.fetchSize();
}
}

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.sql.qa.single_node;
import org.elasticsearch.xpack.sql.qa.jdbc.DatabaseMetaDataTestCase;
public class JdbcDatabaseMetaDataIT extends DatabaseMetaDataTestCase {
}
public class JdbcDatabaseMetaDataIT extends DatabaseMetaDataTestCase {}

View File

@ -30,9 +30,9 @@ import static org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.specParser;
* gets reflected in the dataset structure.
* The doc tests while redundant, try to be expressive first and foremost and sometimes
* the dataset isn't exactly convenient.
*
*
* Also looking around for the tests across the test files isn't trivial.
*
*
* That's not to say the two cannot be merged however that felt like too much of an effort
* at this stage and, to not keep things stalling, started with this approach.
*/
@ -68,7 +68,7 @@ public class JdbcDocCsvSpecIT extends SpecBaseIntegrationTestCase {
//
// uncomment this to printout the result set and create new CSV tests
//
//JdbcTestUtils.logLikeCLI(elastic, log);
// JdbcTestUtils.logLikeCLI(elastic, log);
JdbcAssert.assertResultSets(expected, elastic, log, true, true);
}
@ -87,4 +87,4 @@ public class JdbcDocCsvSpecIT extends SpecBaseIntegrationTestCase {
assertResults(expected, elasticResults);
}
}
}
}

View File

@ -35,7 +35,6 @@ public class JdbcFrozenCsvSpecIT extends CsvSpecTestCase {
return props;
}
public JdbcFrozenCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) {
super(fileName, groupName, testName, lineNumber, testCase);
}

View File

@ -25,8 +25,8 @@ public class JdbcShardFailureIT extends JdbcIntegrationTestCase {
client().performRequest(createTest1);
Request createTest2 = new Request("PUT", "/test2");
String body2 = "{\"aliases\":{\"test\":{}}, \"mappings\": {\"properties\": {\"test_field\":{\"type\":\"integer\"}}}," +
"\"settings\": {\"index.routing.allocation.include.node\": \"nowhere\"}}";
String body2 = "{\"aliases\":{\"test\":{}}, \"mappings\": {\"properties\": {\"test_field\":{\"type\":\"integer\"}}},"
+ "\"settings\": {\"index.routing.allocation.include.node\": \"nowhere\"}}";
createTest2.setJsonEntity(body2);
createTest2.addParameter("timeout", "100ms");
client().performRequest(createTest2);

View File

@ -7,5 +7,4 @@ package org.elasticsearch.xpack.sql.qa.single_node;
import org.elasticsearch.xpack.sql.qa.jdbc.ShowTablesTestCase;
public class JdbcShowTablesIT extends ShowTablesTestCase {
}
public class JdbcShowTablesIT extends ShowTablesTestCase {}

View File

@ -11,4 +11,4 @@ public class JdbcSqlSpecIT extends SqlSpecTestCase {
public JdbcSqlSpecIT(String fileName, String groupName, String testName, Integer lineNumber, String query) {
super(fileName, groupName, testName, lineNumber, query);
}
}
}

View File

@ -17,40 +17,56 @@ import static org.hamcrest.Matchers.containsString;
*/
public class RestSqlIT extends RestSqlTestCase {
public void testErrorMessageForTranslatingQueryWithWhereEvaluatingToFalse() throws IOException {
index("{\"foo\":1}");
expectBadRequest(() -> runTranslateSql(query("SELECT * FROM test WHERE foo = 1 AND foo = 2").toString()),
containsString("Cannot generate a query DSL for an SQL query that either its WHERE clause evaluates " +
"to FALSE or doesn't operate on a table (missing a FROM clause), sql statement: " +
"[SELECT * FROM test WHERE foo = 1 AND foo = 2]"));
expectBadRequest(
() -> runTranslateSql(query("SELECT * FROM test WHERE foo = 1 AND foo = 2").toString()),
containsString(
"Cannot generate a query DSL for an SQL query that either its WHERE clause evaluates "
+ "to FALSE or doesn't operate on a table (missing a FROM clause), sql statement: "
+ "[SELECT * FROM test WHERE foo = 1 AND foo = 2]"
)
);
}
public void testErrorMessageForTranslatingQueryWithLocalExecution() throws IOException {
index("{\"foo\":1}");
expectBadRequest(() -> runTranslateSql(query("SELECT SIN(PI())").toString()),
containsString("Cannot generate a query DSL for an SQL query that either its WHERE clause evaluates " +
"to FALSE or doesn't operate on a table (missing a FROM clause), sql statement: [SELECT SIN(PI())]"));
expectBadRequest(
() -> runTranslateSql(query("SELECT SIN(PI())").toString()),
containsString(
"Cannot generate a query DSL for an SQL query that either its WHERE clause evaluates "
+ "to FALSE or doesn't operate on a table (missing a FROM clause), sql statement: [SELECT SIN(PI())]"
)
);
}
public void testErrorMessageForTranslatingSQLCommandStatement() throws IOException {
index("{\"foo\":1}");
expectBadRequest(() -> runTranslateSql(query("SHOW FUNCTIONS").toString()),
containsString("Cannot generate a query DSL for a special SQL command " +
"(e.g.: DESCRIBE, SHOW), sql statement: [SHOW FUNCTIONS]"));
expectBadRequest(
() -> runTranslateSql(query("SHOW FUNCTIONS").toString()),
containsString(
"Cannot generate a query DSL for a special SQL command " + "(e.g.: DESCRIBE, SHOW), sql statement: [SHOW FUNCTIONS]"
)
);
}
public void testErrorMessageForInvalidParamDataType() throws IOException {
// proto.Mode not available
expectBadRequest(() -> runTranslateSql(
query("SELECT null WHERE 0 = ?").mode("odbc").params("[{\"type\":\"invalid\", \"value\":\"irrelevant\"}]").toString()),
containsString("Invalid parameter data type [invalid]"));
expectBadRequest(
() -> runTranslateSql(
query("SELECT null WHERE 0 = ?").mode("odbc").params("[{\"type\":\"invalid\", \"value\":\"irrelevant\"}]").toString()
),
containsString("Invalid parameter data type [invalid]")
);
}
public void testErrorMessageForInvalidParamSpec() throws IOException {
expectBadRequest(() -> runTranslateSql(
query("SELECT null WHERE 0 = ?").mode("odbc").params("[{\"type\":\"SHAPE\", \"value\":false}]").toString()),
containsString("Cannot cast value [false] of type [BOOLEAN] to parameter type [SHAPE]"));
expectBadRequest(
() -> runTranslateSql(
query("SELECT null WHERE 0 = ?").mode("odbc").params("[{\"type\":\"SHAPE\", \"value\":false}]").toString()
),
containsString("Cannot cast value [false] of type [BOOLEAN] to parameter type [SHAPE]")
);
}
}

View File

@ -8,5 +8,4 @@ package org.elasticsearch.xpack.sql.qa.single_node;
import org.elasticsearch.xpack.sql.qa.rest.RestSqlUsageTestCase;
public class RestSqlUsageIT extends RestSqlUsageTestCase {
}
public class RestSqlUsageIT extends RestSqlUsageTestCase {}

View File

@ -8,5 +8,4 @@ package org.elasticsearch.xpack.sql.qa.single_node;
import org.elasticsearch.xpack.sql.qa.SqlProtocolTestCase;
public class SqlProtocolIT extends SqlProtocolTestCase {
}
public class SqlProtocolIT extends SqlProtocolTestCase {}

View File

@ -30,31 +30,37 @@ import java.util.Locale;
* with different timezones and custom date formats for the date fields in ES.
*/
public abstract class CustomDateFormatTestCase extends BaseRestSqlTestCase {
private static String[] customFormats = new String[] {"HH:mm yyyy-MM-dd", "HH:mm:ss yyyy-dd-MM", "HH:mm:ss VV", "HH:mm:ss VV z",
"yyyy-MM-dd'T'HH:mm:ss'T'VV'T'z"};
private static String[] nowFunctions = new String[] {"NOW()", "CURRENT_DATE()", "CURRENT_TIME()", "CURRENT_TIMESTAMP()"};
private static String[] operators = new String[] {" < ", " > ", " <= ", " >= ", " = ", " != "};
private static String[] customFormats = new String[] {
"HH:mm yyyy-MM-dd",
"HH:mm:ss yyyy-dd-MM",
"HH:mm:ss VV",
"HH:mm:ss VV z",
"yyyy-MM-dd'T'HH:mm:ss'T'VV'T'z" };
private static String[] nowFunctions = new String[] { "NOW()", "CURRENT_DATE()", "CURRENT_TIME()", "CURRENT_TIMESTAMP()" };
private static String[] operators = new String[] { " < ", " > ", " <= ", " >= ", " = ", " != " };
public void testCustomDateFormatsWithNowFunctions() throws IOException {
createIndex();
String[] docs = new String[customFormats.length];
String zID = JdbcIntegrationTestCase.randomKnownTimeZone();
StringBuilder datesConditions = new StringBuilder();
for (int i = 0; i < customFormats.length; i++) {
String field = "date_" + i;
docs[i] = "{\"" + field + "\":\"" +
DateTimeFormatter.ofPattern(customFormats[i], Locale.ROOT).format(DateUtils.nowWithMillisResolution()) + "\"}";
docs[i] = "{\""
+ field
+ "\":\""
+ DateTimeFormatter.ofPattern(customFormats[i], Locale.ROOT).format(DateUtils.nowWithMillisResolution())
+ "\"}";
datesConditions.append(i > 0 ? " OR " : "").append(field + randomFrom(operators) + randomFrom(nowFunctions));
}
index(docs);
Request request = new Request("POST", RestSqlTestCase.SQL_QUERY_REST_ENDPOINT);
final String query = "SELECT COUNT(*) AS c FROM test WHERE " + datesConditions.toString();
request.setEntity(new StringEntity(query(query).mode(Mode.PLAIN).timeZone(zID).toString(),
ContentType.APPLICATION_JSON));
request.setEntity(new StringEntity(query(query).mode(Mode.PLAIN).timeZone(zID).toString(), ContentType.APPLICATION_JSON));
Response response = client().performRequest(request);
String expectedJsonSnippet = "{\"columns\":[{\"name\":\"c\",\"type\":\"long\"}],\"rows\":[[";
@ -70,22 +76,25 @@ public abstract class CustomDateFormatTestCase extends BaseRestSqlTestCase {
assertTrue(actualJson.startsWith(expectedJsonSnippet));
}
}
private void createIndex() throws IOException {
Request request = new Request("PUT", "/test");
Request request = new Request("PUT", "/test");
XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject();
index.startObject("mappings"); {
index.startObject("properties"); {
index.startObject("mappings");
{
index.startObject("properties");
{
for (int i = 0; i < customFormats.length; i++) {
String fieldName = "date_" + i;
index.startObject(fieldName); {
index.startObject(fieldName);
{
index.field("type", "date");
index.field("format", customFormats[i]);
}
index.endObject();
}
index.endObject();
index.endObject();
}
}
index.endObject();

View File

@ -12,16 +12,28 @@ package org.elasticsearch.xpack.sql.qa;
*/
public interface ErrorsTestCase {
void testSelectInvalidSql() throws Exception;
void testSelectFromMissingIndex() throws Exception;
void testSelectColumnFromMissingIndex() throws Exception;
void testSelectFromEmptyIndex() throws Exception;
void testSelectColumnFromEmptyIndex() throws Exception;
void testSelectMissingField() throws Exception;
void testSelectMissingFunction() throws Exception;
void testSelectProjectScoreInAggContext() throws Exception;
void testSelectOrderByScoreInAggContext() throws Exception;
void testSelectGroupByScore() throws Exception;
void testSelectScoreSubField() throws Exception;
void testSelectScoreInScalar() throws Exception;
void testHardLimitForSortOnAggregate() throws Exception;
}

View File

@ -49,18 +49,16 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
String text = randomAlphaOfLength(20);
boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting
boolean enableSource = randomBoolean(); // enable _source at index level
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
createIndexWithFieldTypeAndProperties("text", null, explicitSourceSetting ? indexProps : null);
index("{\"text_field\":\"" + text + "\"}");
if (explicitSourceSetting == false || enableSource) {
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo("plain", "text_field", "text", JDBCType.VARCHAR, Integer.MAX_VALUE)
));
expected.put("columns", Arrays.asList(columnInfo("plain", "text_field", "text", JDBCType.VARCHAR, Integer.MAX_VALUE)));
expected.put("rows", singletonList(singletonList(text)));
assertResponse(expected, runSql(query));
} else {
@ -80,10 +78,10 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting
boolean enableSource = randomBoolean(); // enable _source at index level
boolean ignoreAbove = randomBoolean();
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
Map<String, Map<String, Object>> fieldProps = null;
if (ignoreAbove) {
fieldProps = new HashMap<>(1);
@ -91,14 +89,12 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
fieldProp.put("ignore_above", 10);
fieldProps.put("keyword_field", fieldProp);
}
createIndexWithFieldTypeAndProperties("keyword", fieldProps, explicitSourceSetting ? indexProps : null);
index("{\"keyword_field\":\"" + keyword + "\"}");
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo("plain", "keyword_field", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE)
));
expected.put("columns", Arrays.asList(columnInfo("plain", "keyword_field", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE)));
expected.put("rows", singletonList(singletonList(ignoreAbove ? null : keyword)));
assertResponse(expected, runSql("SELECT keyword_field FROM test"));
}
@ -114,10 +110,10 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
// _source for `constant_keyword` fields doesn't matter, as they should be taken from docvalue_fields
boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting
boolean enableSource = randomBoolean(); // enable _source at index level
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
Map<String, Map<String, Object>> fieldProps = null;
if (randomBoolean()) {
fieldProps = new HashMap<>(1);
@ -125,14 +121,15 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
fieldProp.put("value", value);
fieldProps.put("constant_keyword_field", fieldProp);
}
createIndexWithFieldTypeAndProperties("constant_keyword", fieldProps, explicitSourceSetting ? indexProps : null);
index("{\"constant_keyword_field\":\"" + value + "\"}");
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo("plain", "constant_keyword_field", "constant_keyword", JDBCType.VARCHAR, Integer.MAX_VALUE)
));
expected.put(
"columns",
Arrays.asList(columnInfo("plain", "constant_keyword_field", "constant_keyword", JDBCType.VARCHAR, Integer.MAX_VALUE))
);
expected.put("rows", singletonList(singletonList(value)));
assertResponse(expected, runSql("SELECT constant_keyword_field FROM test"));
}
@ -145,15 +142,16 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
public void testFractionsForNonFloatingPointTypes() throws IOException {
String floatingPointNumber = "123.456";
String fieldType = randomFrom("long", "integer", "short", "byte");
createIndexWithFieldTypeAndProperties(fieldType, null, null);
index("{\"" + fieldType + "_field\":\"" + floatingPointNumber + "\"}");
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo("plain", fieldType + "_field", fieldType, jdbcTypeFor(fieldType), Integer.MAX_VALUE)
));
expected.put(
"columns",
Arrays.asList(columnInfo("plain", fieldType + "_field", fieldType, jdbcTypeFor(fieldType), Integer.MAX_VALUE))
);
// because "coerce" is true, a "123.456" floating point number STRING should be converted to 123, no matter the numeric field type
expected.put("rows", singletonList(singletonList(123)));
assertResponse(expected, runSql("SELECT " + fieldType + "_field FROM test"));
@ -169,7 +167,7 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
String floatingPointNumber = "123.456";
String fieldType = randomFrom("double", "float", "half_float", "scaled_float");
boolean isScaledFloat = fieldType == "scaled_float";
Map<String, Map<String, Object>> fieldProps = null;
if (isScaledFloat) {
fieldProps = new HashMap<>(1);
@ -177,20 +175,27 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
fieldProp.put("scaling_factor", 10); // scaling_factor is required for "scaled_float"
fieldProps.put(fieldType + "_field", fieldProp);
}
createIndexWithFieldTypeAndProperties(fieldType, fieldProps, null);
// important here is to pass floatingPointNumber as a string: "float_field": "123.456"
index("{\"" + fieldType + "_field\":\"" + floatingPointNumber + "\"}");
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo("plain", fieldType + "_field", fieldType, jdbcTypeFor(fieldType), Integer.MAX_VALUE)
));
expected.put(
"columns",
Arrays.asList(columnInfo("plain", fieldType + "_field", fieldType, jdbcTypeFor(fieldType), Integer.MAX_VALUE))
);
// because "coerce" is true, a "123.456" floating point number STRING should be converted to 123.456 as number
// and converted to 123.5 for "scaled_float" type
expected.put("rows", singletonList(singletonList(
isScaledFloat ? 123.5 : (fieldType != "double" ? Double.valueOf(123.456f) : Double.valueOf(floatingPointNumber)))));
expected.put(
"rows",
singletonList(
singletonList(
isScaledFloat ? 123.5 : (fieldType != "double" ? Double.valueOf(123.456f) : Double.valueOf(floatingPointNumber))
)
)
);
assertResponse(expected, runSql("SELECT " + fieldType + "_field FROM test"));
}
@ -237,7 +242,7 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
// Integers for short and byte values
testField("byte", ((Number) randomByte()).intValue());
}
private void testField(String fieldType, Object value) throws IOException {
String fieldName = fieldType + "_field";
String query = "SELECT " + fieldName + " FROM test";
@ -245,10 +250,10 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting
boolean enableSource = randomBoolean(); // enable _source at index level
boolean ignoreMalformed = randomBoolean(); // ignore_malformed is true, thus test a non-number value
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
Map<String, Map<String, Object>> fieldProps = null;
if (ignoreMalformed) {
fieldProps = new HashMap<>(1);
@ -258,15 +263,13 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
fieldProps.put(fieldName, fieldProp);
actualValue = "\"foo\"";
}
createIndexWithFieldTypeAndProperties(fieldType, fieldProps, explicitSourceSetting ? indexProps : null);
index("{\"" + fieldName + "\":" + actualValue + "}");
if (explicitSourceSetting == false || enableSource) {
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo("plain", fieldName, fieldType, jdbcTypeFor(fieldType), Integer.MAX_VALUE)
));
expected.put("columns", Arrays.asList(columnInfo("plain", fieldName, fieldType, jdbcTypeFor(fieldType), Integer.MAX_VALUE)));
expected.put("rows", singletonList(singletonList(ignoreMalformed ? null : actualValue)));
assertResponse(expected, runSql(query));
} else {
@ -285,22 +288,20 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting
boolean enableSource = randomBoolean(); // enable _source at index level
boolean asString = randomBoolean(); // pass true or false as string "true" or "false
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
createIndexWithFieldTypeAndProperties("boolean", null, explicitSourceSetting ? indexProps : null);
if (asString) {
index("{\"boolean_field\":\"" + booleanField + "\"}");
} else {
index("{\"boolean_field\":" + booleanField + "}");
}
if (explicitSourceSetting == false || enableSource) {
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo("plain", "boolean_field", "boolean", JDBCType.BOOLEAN, Integer.MAX_VALUE)
));
expected.put("columns", Arrays.asList(columnInfo("plain", "boolean_field", "boolean", JDBCType.BOOLEAN, Integer.MAX_VALUE)));
// adding the boolean as a String here because parsing the response will yield a "true"/"false" String
expected.put("rows", singletonList(singletonList(asString ? String.valueOf(booleanField) : booleanField)));
assertResponse(expected, runSql(query));
@ -319,18 +320,16 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
String ipField = "192.168.1.1";
boolean explicitSourceSetting = randomBoolean(); // default (no _source setting) or explicit setting
boolean enableSource = randomBoolean(); // enable _source at index level
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
createIndexWithFieldTypeAndProperties("ip", null, explicitSourceSetting ? indexProps : null);
index("{\"ip_field\":\"" + ipField + "\"}");
if (explicitSourceSetting == false || enableSource) {
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo("plain", "ip_field", "ip", JDBCType.VARCHAR, Integer.MAX_VALUE)
));
expected.put("columns", Arrays.asList(columnInfo("plain", "ip_field", "ip", JDBCType.VARCHAR, Integer.MAX_VALUE)));
expected.put("rows", singletonList(singletonList(ipField)));
assertResponse(expected, runSql(query));
} else {
@ -353,16 +352,19 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
*/
public void testAliasFromDocValueField() throws IOException {
String keyword = randomAlphaOfLength(20);
createIndexWithFieldTypeAndAlias("keyword", null, null);
index("{\"keyword_field\":\"" + keyword + "\"}");
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
expected.put(
"columns",
Arrays.asList(
columnInfo("plain", "keyword_field", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE),
columnInfo("plain", "keyword_field_alias", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE),
columnInfo("plain", "a.b.c.keyword_field_alias", "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE)
));
)
);
expected.put("rows", singletonList(Arrays.asList(keyword, keyword, keyword)));
assertResponse(expected, runSql("SELECT keyword_field, keyword_field_alias, a.b.c.keyword_field_alias FROM test"));
}
@ -382,16 +384,19 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
*/
public void testAliasFromSourceField() throws IOException {
String text = randomAlphaOfLength(20);
createIndexWithFieldTypeAndAlias("text", null, null);
index("{\"text_field\":\"" + text + "\"}");
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
expected.put(
"columns",
Arrays.asList(
columnInfo("plain", "text_field", "text", JDBCType.VARCHAR, Integer.MAX_VALUE),
columnInfo("plain", "text_field_alias", "text", JDBCType.VARCHAR, Integer.MAX_VALUE),
columnInfo("plain", "a.b.c.text_field_alias", "text", JDBCType.VARCHAR, Integer.MAX_VALUE)
));
)
);
expected.put("rows", singletonList(Arrays.asList(text, null, null)));
assertResponse(expected, runSql("SELECT text_field, text_field_alias, a.b.c.text_field_alias FROM test"));
}
@ -411,16 +416,19 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
*/
public void testAliasAggregatableFromSourceField() throws IOException {
int number = randomInt();
createIndexWithFieldTypeAndAlias("integer", null, null);
index("{\"integer_field\":" + number + "}");
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
expected.put(
"columns",
Arrays.asList(
columnInfo("plain", "integer_field", "integer", JDBCType.INTEGER, Integer.MAX_VALUE),
columnInfo("plain", "integer_field_alias", "integer", JDBCType.INTEGER, Integer.MAX_VALUE),
columnInfo("plain", "a.b.c.integer_field_alias", "integer", JDBCType.INTEGER, Integer.MAX_VALUE)
));
)
);
expected.put("rows", singletonList(Arrays.asList(number, null, number)));
assertResponse(expected, runSql("SELECT integer_field, integer_field_alias, a.b.c.integer_field_alias FROM test"));
}
@ -445,10 +453,10 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
String fieldName = "text_field";
String subFieldName = "text_field.keyword_subfield";
String query = "SELECT " + fieldName + "," + subFieldName + " FROM test";
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
Map<String, Map<String, Object>> subFieldsProps = null;
if (ignoreAbove) {
subFieldsProps = new HashMap<>(1);
@ -456,27 +464,28 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
fieldProp.put("ignore_above", 10);
subFieldsProps.put(subFieldName, fieldProp);
}
createIndexWithFieldTypeAndSubFields("text", null, explicitSourceSetting ? indexProps : null, subFieldsProps, "keyword");
index("{\"" + fieldName + "\":\"" + text + "\"}");
if (explicitSourceSetting == false || enableSource) {
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
expected.put(
"columns",
Arrays.asList(
columnInfo("plain", fieldName, "text", JDBCType.VARCHAR, Integer.MAX_VALUE),
columnInfo("plain", subFieldName, "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE)
));
)
);
expected.put("rows", singletonList(Arrays.asList(text, ignoreAbove ? null : text)));
assertResponse(expected, runSql(query));
} else {
expectSourceDisabledError(query);
// even if the _source is disabled, selecting only the keyword sub-field should work as expected
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
columnInfo("plain", subFieldName, "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE)
));
expected.put("columns", Arrays.asList(columnInfo("plain", subFieldName, "keyword", JDBCType.VARCHAR, Integer.MAX_VALUE)));
expected.put("rows", singletonList(singletonList(ignoreAbove ? null : text)));
assertResponse(expected, runSql("SELECT text_field.keyword_subfield FROM test"));
@ -502,11 +511,11 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
Object actualValue = number;
String fieldName = "text_field";
String subFieldName = "text_field.integer_subfield";
String query = "SELECT " + fieldName + "," + subFieldName +" FROM test";
String query = "SELECT " + fieldName + "," + subFieldName + " FROM test";
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
Map<String, Map<String, Object>> subFieldsProps = null;
if (ignoreMalformed) {
subFieldsProps = new HashMap<>(1);
@ -516,16 +525,19 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
subFieldsProps.put(subFieldName, fieldProp);
actualValue = "foo";
}
createIndexWithFieldTypeAndSubFields("text", null, explicitSourceSetting ? indexProps : null, subFieldsProps, "integer");
index("{\"" + fieldName + "\":\"" + actualValue + "\"}");
if (explicitSourceSetting == false || enableSource) {
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
expected.put(
"columns",
Arrays.asList(
columnInfo("plain", fieldName, "text", JDBCType.VARCHAR, Integer.MAX_VALUE),
columnInfo("plain", subFieldName, "integer", JDBCType.INTEGER, Integer.MAX_VALUE)
));
)
);
if (ignoreMalformed) {
expected.put("rows", singletonList(Arrays.asList("foo", null)));
} else {
@ -559,11 +571,11 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
Object actualValue = number;
String fieldName = "integer_field";
String subFieldName = "integer_field." + (isKeyword ? "keyword_subfield" : "text_subfield");
String query = "SELECT " + fieldName + "," + subFieldName +" FROM test";
String query = "SELECT " + fieldName + "," + subFieldName + " FROM test";
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
Map<String, Map<String, Object>> fieldProps = null;
if (ignoreMalformed) {
fieldProps = new HashMap<>(1);
@ -573,17 +585,25 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
fieldProps.put(fieldName, fieldProp);
actualValue = "foo";
}
createIndexWithFieldTypeAndSubFields("integer", fieldProps, explicitSourceSetting ? indexProps : null, null,
isKeyword ? "keyword" : "text");
createIndexWithFieldTypeAndSubFields(
"integer",
fieldProps,
explicitSourceSetting ? indexProps : null,
null,
isKeyword ? "keyword" : "text"
);
index("{\"" + fieldName + "\":\"" + actualValue + "\"}");
if (explicitSourceSetting == false || enableSource) {
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
expected.put(
"columns",
Arrays.asList(
columnInfo("plain", fieldName, "integer", JDBCType.INTEGER, Integer.MAX_VALUE),
columnInfo("plain", subFieldName, isKeyword ? "keyword" : "text", JDBCType.VARCHAR, Integer.MAX_VALUE)
));
)
);
if (ignoreMalformed) {
expected.put("rows", singletonList(Arrays.asList(null, "foo")));
} else {
@ -609,7 +629,7 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
expectSourceDisabledError("SELECT " + fieldName + " FROM test");
}
}
/*
* "integer_field": {
* "type": "integer",
@ -632,10 +652,10 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
String fieldName = "integer_field";
String subFieldName = "integer_field.byte_subfield";
String query = "SELECT " + fieldName + "," + subFieldName + " FROM test";
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
Map<String, Map<String, Object>> fieldProps = null;
if (rootIgnoreMalformed) {
fieldProps = new HashMap<>(1);
@ -650,15 +670,18 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
fieldProp.put("ignore_malformed", true);
subFieldProps.put(subFieldName, fieldProp);
}
createIndexWithFieldTypeAndSubFields("integer", fieldProps, explicitSourceSetting ? indexProps : null, subFieldProps, "byte");
index("{\"" + fieldName + "\":" + number + "}");
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
expected.put(
"columns",
Arrays.asList(
columnInfo("plain", fieldName, "integer", JDBCType.INTEGER, Integer.MAX_VALUE),
columnInfo("plain", subFieldName, "byte", JDBCType.TINYINT, Integer.MAX_VALUE)
));
)
);
if (explicitSourceSetting == false || enableSource) {
if (isByte || subFieldIgnoreMalformed) {
expected.put("rows", singletonList(Arrays.asList(number, isByte ? number : null)));
@ -698,10 +721,10 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
String fieldName = "byte_field";
String subFieldName = "byte_field.integer_subfield";
String query = "SELECT " + fieldName + "," + subFieldName + " FROM test";
Map<String, Object> indexProps = new HashMap<>(1);
indexProps.put("_source", enableSource);
Map<String, Map<String, Object>> fieldProps = null;
if (rootIgnoreMalformed) {
fieldProps = new HashMap<>(1);
@ -716,15 +739,18 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
fieldProp.put("ignore_malformed", true);
subFieldProps.put(subFieldName, fieldProp);
}
createIndexWithFieldTypeAndSubFields("byte", fieldProps, explicitSourceSetting ? indexProps : null, subFieldProps, "integer");
index("{\"" + fieldName + "\":" + number + "}");
Map<String, Object> expected = new HashMap<>();
expected.put("columns", Arrays.asList(
expected.put(
"columns",
Arrays.asList(
columnInfo("plain", fieldName, "byte", JDBCType.TINYINT, Integer.MAX_VALUE),
columnInfo("plain", subFieldName, "integer", JDBCType.INTEGER, Integer.MAX_VALUE)
));
)
);
if (explicitSourceSetting == false || enableSource) {
if (isByte || rootIgnoreMalformed) {
expected.put("rows", singletonList(Arrays.asList(isByte ? number : null, number)));
@ -748,77 +774,96 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
return Collections.emptyMap();
}, containsString("unable to fetch fields from _source field: _source is disabled in the mappings for index [test]"));
}
private void createIndexWithFieldTypeAndAlias(String type, Map<String, Map<String, Object>> fieldProps,
Map<String, Object> indexProps) throws IOException {
private void createIndexWithFieldTypeAndAlias(String type, Map<String, Map<String, Object>> fieldProps, Map<String, Object> indexProps)
throws IOException {
createIndexWithFieldTypeAndProperties(type, fieldProps, indexProps, true, false, null);
}
private void createIndexWithFieldTypeAndProperties(String type, Map<String, Map<String, Object>> fieldProps,
Map<String, Object> indexProps) throws IOException {
private void createIndexWithFieldTypeAndProperties(
String type,
Map<String, Map<String, Object>> fieldProps,
Map<String, Object> indexProps
) throws IOException {
createIndexWithFieldTypeAndProperties(type, fieldProps, indexProps, false, false, null);
}
private void createIndexWithFieldTypeAndSubFields(String type, Map<String, Map<String, Object>> fieldProps,
Map<String, Object> indexProps, Map<String, Map<String, Object>> subFieldsProps,
String... subFieldsTypes) throws IOException {
private void createIndexWithFieldTypeAndSubFields(
String type,
Map<String, Map<String, Object>> fieldProps,
Map<String, Object> indexProps,
Map<String, Map<String, Object>> subFieldsProps,
String... subFieldsTypes
) throws IOException {
createIndexWithFieldTypeAndProperties(type, fieldProps, indexProps, false, true, subFieldsProps, subFieldsTypes);
}
private void createIndexWithFieldTypeAndProperties(String type, Map<String, Map<String, Object>> fieldProps,
Map<String, Object> indexProps, boolean withAlias, boolean withSubFields, Map<String, Map<String, Object>> subFieldsProps,
String... subFieldsTypes) throws IOException {
Request request = new Request("PUT", "/test");
private void createIndexWithFieldTypeAndProperties(
String type,
Map<String, Map<String, Object>> fieldProps,
Map<String, Object> indexProps,
boolean withAlias,
boolean withSubFields,
Map<String, Map<String, Object>> subFieldsProps,
String... subFieldsTypes
) throws IOException {
Request request = new Request("PUT", "/test");
XContentBuilder index = JsonXContent.contentBuilder().prettyPrint().startObject();
index.startObject("mappings"); {
index.startObject("mappings");
{
if (indexProps != null) {
for (Entry<String, Object> prop : indexProps.entrySet()) {
if (prop.getValue() instanceof Boolean) {
index.startObject(prop.getKey()); {
index.startObject(prop.getKey());
{
index.field("enabled", prop.getValue());
}
index.endObject();
}
}
}
index.startObject("properties"); {
index.startObject("properties");
{
String fieldName = type + "_field";
index.startObject(fieldName); {
index.startObject(fieldName);
{
index.field("type", type);
if (fieldProps != null && fieldProps.containsKey(fieldName)) {
for (Entry<String, Object> prop : fieldProps.get(fieldName).entrySet()) {
index.field(prop.getKey(), prop.getValue());
}
}
if (withSubFields) {
index.startObject("fields");
for (String subFieldType : subFieldsTypes) {
String subFieldName = subFieldType + "_subfield";
String fullSubFieldName = fieldName + "." + subFieldName;
index.startObject(subFieldName);
index.field("type", subFieldType);
if (subFieldsProps != null && subFieldsProps.containsKey(fullSubFieldName)) {
for (Entry<String, Object> prop : subFieldsProps.get(fullSubFieldName).entrySet()) {
index.field(prop.getKey(), prop.getValue());
}
}
index.endObject();
for (String subFieldType : subFieldsTypes) {
String subFieldName = subFieldType + "_subfield";
String fullSubFieldName = fieldName + "." + subFieldName;
index.startObject(subFieldName);
index.field("type", subFieldType);
if (subFieldsProps != null && subFieldsProps.containsKey(fullSubFieldName)) {
for (Entry<String, Object> prop : subFieldsProps.get(fullSubFieldName).entrySet()) {
index.field(prop.getKey(), prop.getValue());
}
}
index.endObject();
}
index.endObject();
}
}
index.endObject();
if (withAlias) {
// create two aliases - one within a hierarchy, the other just a simple field w/o hierarchy
index.startObject(fieldName + "_alias"); {
index.startObject(fieldName + "_alias");
{
index.field("type", "alias");
index.field("path", fieldName);
}
index.endObject();
index.startObject("a.b.c." + fieldName + "_alias"); {
index.startObject("a.b.c." + fieldName + "_alias");
{
index.field("type", "alias");
index.field("path", fieldName);
}
@ -833,7 +878,7 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
request.setJsonEntity(Strings.toString(index));
client().performRequest(request);
}
private Request buildRequest(String query) {
Request request = new Request("POST", RestSqlTestCase.SQL_QUERY_REST_ENDPOINT);
request.addParameter("error_trace", "true");
@ -842,16 +887,16 @@ public abstract class FieldExtractorTestCase extends BaseRestSqlTestCase {
return request;
}
/**
 * Executes the given SQL query against the REST SQL endpoint and returns the
 * JSON response body parsed into a map.
 *
 * @param query the SQL statement to execute
 * @return the response body converted to a map (the {@code false} flag presumably
 *         requests an unordered map — confirm against XContentHelper.convertToMap)
 * @throws IOException if the request fails or the response body cannot be read
 */
private Map<String, Object> runSql(String query) throws IOException {
    // Issue the request and stream-parse its body; try-with-resources closes the content stream.
    try (InputStream body = client().performRequest(buildRequest(query)).getEntity().getContent()) {
        return XContentHelper.convertToMap(JsonXContent.jsonXContent, body, false);
    }
}
private JDBCType jdbcTypeFor(String esType) {
switch(esType) {
switch (esType) {
case "long":
return JDBCType.BIGINT;
case "integer":

Some files were not shown because too many files have changed in this diff Show More