EQL: Refactor testing infrastructure (#62928)
Extract reusable methods inside QL TestUtils
Rename abstract base classes for clarity
Clean-up EQL DataLoader

(cherry picked from commit 48db3f285aa8976ead5a9f5d071a9c1046d7bd31)
parent 25106ba58f
commit ef7a6ce4b2
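For orientation, a minimal, dependency-free sketch (illustrative only, not the actual Elasticsearch classes) of the layout this change moves to, as reconstructed from the diff below: an abstract spec base class carries the target index, and concrete subclasses bind it to the main index ("endgame-140") or the extra index ("extra"). The real classes are BaseEqlSpecTestCase, EqlSpecTestCase and EqlExtraSpecTestCase, which extend ESRestTestCase and load their data through DataLoader.

// Illustrative sketch only; the class names below are hypothetical stand-ins.
abstract class SpecBase {
    final String index;
    final String query;

    SpecBase(String index, String query) {
        this.index = index;
        this.query = query;
    }

    // Mirrors the idea of BaseEqlSpecTestCase.test(): run the query against the bound index.
    String describe() {
        return "query [" + query + "] -> index [" + index + "]";
    }
}

// Counterpart of EqlSpecTestCase: binds the main index (DataLoader.TEST_INDEX, "endgame-140").
class MainIndexSpec extends SpecBase {
    MainIndexSpec(String query) {
        super("endgame-140", query);
    }
}

// Counterpart of EqlExtraSpecTestCase: binds the extra index (DataLoader.TEST_EXTRA_INDEX, "extra").
class ExtraIndexSpec extends SpecBase {
    ExtraIndexSpec(String query) {
        super("extra", query);
    }
}

public class LayoutSketch {
    public static void main(String[] args) {
        System.out.println(new MainIndexSpec("process where serial_event_id == 1").describe());
        System.out.println(new ExtraIndexSpec("sequence by transID [ REQUEST where true ]").describe());
    }
}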
@@ -6,12 +6,10 @@

package org.elasticsearch.test.eql;

import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.Build;
import org.elasticsearch.client.EqlClient;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.eql.EqlSearchRequest;
import org.elasticsearch.client.eql.EqlSearchResponse;
@@ -21,76 +19,49 @@ import org.elasticsearch.client.eql.EqlSearchResponse.Sequence;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static java.util.stream.Collectors.toList;
import static org.elasticsearch.test.eql.DataLoader.testIndexName;

public abstract class CommonEqlActionTestCase extends ESRestTestCase {
public abstract class BaseEqlSpecTestCase extends ESRestTestCase {

protected static final String PARAM_FORMATTING = "%1$s.test -> %2$s";
private static int counter = 0;

private RestHighLevelClient highLevelClient;

@BeforeClass
public static void checkForSnapshot() {
assumeTrue("Only works on snapshot builds for now", Build.CURRENT.isSnapshot());
}
private final String index;
private final String query;
private final String name;
private final long[] eventIds;
private final boolean caseSensitive;

@Before
public void setup() throws Exception {
if (client().performRequest(new Request("HEAD", "/" + testIndexName)).getStatusLine().getStatusCode() == 404) {
DataLoader.loadDatasetIntoEs(highLevelClient(), (t, u) -> createParser(t, u));
private void setup() throws Exception {
if (client().performRequest(new Request("HEAD", "/" + index)).getStatusLine().getStatusCode() == 404) {
DataLoader.loadDatasetIntoEs(highLevelClient(), this::createParser);
}
}

@After
public void cleanup() throws Exception {
if (--counter == 0) {
deleteIndex(testIndexName);
@AfterClass
public static void wipeTestData() throws IOException {
try {
adminClient().performRequest(new Request("DELETE", "/*"));
} catch (ResponseException e) {
// 404 here just means we had no indexes
if (e.getResponse().getStatusLine().getStatusCode() != 404) {
throw e;
}
}
}

@ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING)
public static List<Object[]> readTestSpecs() throws Exception {

// Load EQL validation specs
Set<String> uniqueTestNames = new HashSet<>();
List<EqlSpec> specs = EqlSpecLoader.load("/test_queries.toml", true, uniqueTestNames);
specs.addAll(EqlSpecLoader.load("/additional_test_queries.toml", true, uniqueTestNames));
List<EqlSpec> unsupportedSpecs = EqlSpecLoader.load("/test_queries_unsupported.toml", false, uniqueTestNames);

// Validate only currently supported specs
List<EqlSpec> filteredSpecs = new ArrayList<>();

for (EqlSpec spec : specs) {
boolean supported = true;
// Check if spec is supported, simple iteration, cause the list is short.
for (EqlSpec unSpec : unsupportedSpecs) {
if (spec.equals(unSpec)) {
supported = false;
break;
}
}

if (supported) {
filteredSpecs.add(spec);
}
}
counter = specs.size();
return asArray(filteredSpecs);
}

private static List<Object[]> asArray(List<EqlSpec> specs) {
protected static List<Object[]> asArray(List<EqlSpec> specs) {
int counter = 0;
List<Object[]> results = new ArrayList<>();

@@ -114,12 +85,9 @@ public abstract class CommonEqlActionTestCase extends ESRestTestCase {
return results;
}

private final String query;
private final String name;
private final long[] eventIds;
private final boolean caseSensitive;
BaseEqlSpecTestCase(String index, String query, String name, long[] eventIds, boolean caseSensitive) {
this.index = index;

public CommonEqlActionTestCase(String query, String name, long[] eventIds, boolean caseSensitive) {
this.query = query;
this.name = name;
this.eventIds = eventIds;
@@ -127,7 +95,7 @@ public abstract class CommonEqlActionTestCase extends ESRestTestCase {
}

public void test() throws Exception {
assertResponse(runQuery(testIndexName, query, caseSensitive));
assertResponse(runQuery(index, query, caseSensitive));
}

protected void assertResponse(EqlSearchResponse response) {
@@ -150,15 +118,20 @@ public abstract class CommonEqlActionTestCase extends ESRestTestCase {
// some queries return more than 10 results
request.size(50);
request.fetchSize(randomIntBetween(2, 50));
return eqlClient().search(request, RequestOptions.DEFAULT);
return runRequest(eqlClient(), request);
}

private EqlClient eqlClient() {
protected EqlSearchResponse runRequest(EqlClient eqlClient, EqlSearchRequest request) throws IOException {
return eqlClient.search(request, RequestOptions.DEFAULT);
}

protected EqlClient eqlClient() {
return highLevelClient().eql();
}

protected void assertEvents(List<Event> events) {
assertNotNull(events);
logger.info("Events {}", events);
long[] expected = eventIds;
long[] actual = extractIds(events);
assertArrayEquals(LoggerMessageFormat.format(null, "unexpected result for spec[{}] [{}] -> {} vs {}", name, query, Arrays.toString(
@@ -166,11 +139,13 @@ public abstract class CommonEqlActionTestCase extends ESRestTestCase {
expected, actual);
}

private static long[] extractIds(List<Event> events) {
@SuppressWarnings("unchecked")
private long[] extractIds(List<Event> events) {
final int len = events.size();
final long ids[] = new long[len];
final long[] ids = new long[len];
for (int i = 0; i < len; i++) {
ids[i] = ((Number) events.get(i).sourceAsMap().get("serial_event_id")).longValue();
Object field = events.get(i).sourceAsMap().get(sequenceField());
ids[i] = ((Number) field).longValue();
}
return ids;
}
@@ -199,4 +174,8 @@ public abstract class CommonEqlActionTestCase extends ESRestTestCase {
// Need to preserve data between parameterized tests runs
return true;
}

protected String sequenceField() {
return "sequence";
}
}

@@ -24,12 +24,12 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.ql.TestUtils;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.net.URL;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
@@ -39,13 +39,20 @@ import java.util.Map.Entry;
import static org.hamcrest.Matchers.instanceOf;
import static org.junit.Assert.assertThat;

/**
* Loads EQL dataset into ES.
*
* Checks for predefined indices:
* - endgame-140 - for existing data
* - extra - additional data
*
* While the loader could be made generic, the queries are bound to each index and generalizing that would make things way too complicated.
*/
public class DataLoader {
public static final String TEST_INDEX = "endgame-140";
public static final String TEST_EXTRA_INDEX = "extra";

private static final String TEST_DATA = "/test_data.json";
private static final String MAPPING = "/mapping-default.json";
private static final Map<String, String[]> replacementPatterns = Collections.unmodifiableMap(getReplacementPatterns());
static final String indexPrefix = "endgame";
public static final String testIndexName = indexPrefix + "-1.4.0";

private static final long FILETIME_EPOCH_DIFF = 11644473600000L;
private static final long FILETIME_ONE_MILLISECOND = 10 * 1000;
@@ -63,26 +70,52 @@ public class DataLoader {
ignore -> {
},
Collections.emptyList()) {
}, (t, u) -> createParser(t, u));
}, DataLoader::createParser);
}
}

public static void loadDatasetIntoEs(RestHighLevelClient client,
CheckedBiFunction<XContent, InputStream, XContentParser, IOException> p) throws IOException {

createTestIndex(client);
loadData(client, p);
//
// Main Index
//
load(client, TEST_INDEX, true, p);
//
// Aux Index
//
load(client, TEST_EXTRA_INDEX, false, p);
}

private static void createTestIndex(RestHighLevelClient client) throws IOException {
CreateIndexRequest request = new CreateIndexRequest(testIndexName).mapping(getMapping(MAPPING), XContentType.JSON);
private static void load(RestHighLevelClient client, String indexName, boolean winFileTime,
CheckedBiFunction<XContent, InputStream, XContentParser, IOException> p) throws IOException {
String name = "/data/" + indexName + ".mapping";
URL mapping = DataLoader.class.getResource(name);
if (mapping == null) {
throw new IllegalArgumentException("Cannot find resource " + name);
}
name = "/data/" + indexName + ".data";
URL data = DataLoader.class.getResource(name);
if (data == null) {
throw new IllegalArgumentException("Cannot find resource " + name);
}
createTestIndex(client, indexName, readMapping(mapping));
loadData(client, indexName, winFileTime, data, p);
}

private static void createTestIndex(RestHighLevelClient client, String indexName, String mapping) throws IOException {
CreateIndexRequest request = new CreateIndexRequest(indexName);
if (mapping != null) {
request.mapping(mapping, XContentType.JSON);
}
client.indices().create(request, RequestOptions.DEFAULT);
}

private static String getMapping(String mappingPath) throws IOException {
try (BufferedReader reader = new BufferedReader(
new InputStreamReader(DataLoader.class.getResourceAsStream(mappingPath), StandardCharsets.UTF_8)))
{
/**
* Reads the mapping file, ignoring comments and replacing placeholders for random types.
*/
private static String readMapping(URL resource) throws IOException {
try (BufferedReader reader = TestUtils.reader(resource)) {
StringBuilder b = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
@@ -98,27 +131,30 @@ public class DataLoader {
}

@SuppressWarnings("unchecked")
private static void loadData(RestHighLevelClient client, CheckedBiFunction<XContent, InputStream, XContentParser, IOException> p)
private static void loadData(RestHighLevelClient client, String indexName, boolean winfileTime, URL resource,
CheckedBiFunction<XContent, InputStream, XContentParser, IOException> p)
throws IOException {
BulkRequest bulk = new BulkRequest();
bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);

try (XContentParser parser = p.apply(JsonXContent.jsonXContent, DataLoader.class.getResourceAsStream(TEST_DATA))) {
try (XContentParser parser = p.apply(JsonXContent.jsonXContent, TestUtils.inputStream(resource))) {
List<Object> list = parser.list();
for (Object item : list) {
assertThat(item, instanceOf(Map.class));
Map<String, Object> entry = (Map<String, Object>) item;
if (winfileTime) {
transformDataset(entry);
bulk.add(new IndexRequest(testIndexName).source(entry, XContentType.JSON));
}
bulk.add(new IndexRequest(indexName).source(entry, XContentType.JSON));
}
}

if (bulk.numberOfActions() > 0) {
BulkResponse bulkResponse = client.bulk(bulk, RequestOptions.DEFAULT);
if (bulkResponse.hasFailures()) {
LogManager.getLogger(DataLoader.class).info("Data FAILED loading");
LogManager.getLogger(DataLoader.class).info("Data loading FAILED");
} else {
LogManager.getLogger(DataLoader.class).info("Data loaded");
LogManager.getLogger(DataLoader.class).info("Data loading OK");
}
}
}

@@ -0,0 +1,26 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

package org.elasticsearch.test.eql;

import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import java.util.HashSet;
import java.util.List;

import static org.elasticsearch.test.eql.DataLoader.TEST_EXTRA_INDEX;

public abstract class EqlExtraSpecTestCase extends BaseEqlSpecTestCase {

@ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING)
public static List<Object[]> readTestSpecs() throws Exception {
return asArray(EqlSpecLoader.load("/test_extra.toml", true, new HashSet<>()));
}

public EqlExtraSpecTestCase(String query, String name, long[] eventIds, boolean caseSensitive) {
super(TEST_EXTRA_INDEX, query, name, eventIds, caseSensitive);
}
}

@@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;

public abstract class CommonEqlRestTestCase extends ESRestTestCase {
public abstract class EqlRestTestCase extends ESRestTestCase {

private static final String defaultValidationIndexName = "eql_search_validation_test";
private static final String validQuery = "process where user = \"SYSTEM\"";

@@ -20,6 +20,9 @@ import java.util.Set;
public class EqlSpecLoader {
public static List<EqlSpec> load(String path, boolean supported, Set<String> uniqueTestNames) throws Exception {
try (InputStream is = EqlSpecLoader.class.getResourceAsStream(path)) {
if (is == null) {
throw new IllegalAccessException("Cannot find classpath resource " + path);
}
return readFromStream(is, supported, uniqueTestNames);
}
}

@@ -0,0 +1,52 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

package org.elasticsearch.test.eql;

import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.elasticsearch.test.eql.DataLoader.TEST_INDEX;

public abstract class EqlSpecTestCase extends BaseEqlSpecTestCase {

@ParametersFactory(shuffle = false, argumentFormatting = PARAM_FORMATTING)
public static List<Object[]> readTestSpecs() throws Exception {

// Load EQL validation specs
Set<String> uniqueTestNames = new HashSet<>();
List<EqlSpec> specs = EqlSpecLoader.load("/test_queries.toml", true, uniqueTestNames);
specs.addAll(EqlSpecLoader.load("/additional_test_queries.toml", true, uniqueTestNames));
List<EqlSpec> unsupportedSpecs = EqlSpecLoader.load("/test_queries_unsupported.toml", false, uniqueTestNames);

// Validate only currently supported specs
List<EqlSpec> filteredSpecs = new ArrayList<>();

for (EqlSpec spec : specs) {
boolean supported = true;
// Check if spec is supported, simple iteration, cause the list is short.
for (EqlSpec unSpec : unsupportedSpecs) {
if (spec.equals(unSpec)) {
supported = false;
break;
}
}

if (supported) {
filteredSpecs.add(spec);
}
}
return asArray(filteredSpecs);
}

public EqlSpecTestCase(String query, String name, long[] eventIds, boolean caseSensitive) {
super(TEST_INDEX, query, name, eventIds, caseSensitive);
}
}

@@ -6,7 +6,6 @@

package org.elasticsearch.test.eql.stats;

import org.elasticsearch.Build;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -14,7 +13,6 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.eql.DataLoader;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.Before;
import org.junit.BeforeClass;

import java.io.IOException;
import java.io.InputStream;
@@ -27,23 +25,18 @@ import java.util.Map;
import java.util.Set;

import static java.util.Collections.unmodifiableSet;
import static org.elasticsearch.test.eql.DataLoader.testIndexName;

/**
* Tests a random number of queries that increase various (most of the times, one query will "touch" multiple metrics values) metrics.
*/
public abstract class RestEqlUsageTestCase extends ESRestTestCase {
public abstract class EqlUsageRestTestCase extends ESRestTestCase {

private RestHighLevelClient highLevelClient;
private Map<String, Integer> baseMetrics = new HashMap<String, Integer>();
private Integer baseAllTotalQueries = 0;
private Integer baseAllFailedQueries = 0;

@BeforeClass
public static void checkForSnapshot() {
assumeTrue("Only works on snapshot builds for now", Build.CURRENT.isSnapshot());
}

/**
* This method gets the metrics' values before the test runs, in case these values
* were changed by other tests running in the same REST test cluster. The test itself
@@ -123,8 +116,8 @@ public abstract class RestEqlUsageTestCase extends ESRestTestCase {
// create the index and load the data, if the index doesn't exist
// it doesn't matter if the index is already there (probably created by another test); _if_ its mapping is the expected one
// it should be enough
if (client().performRequest(new Request("HEAD", "/" + testIndexName)).getStatusLine().getStatusCode() == 404) {
DataLoader.loadDatasetIntoEs(highLevelClient(), (t, u) -> createParser(t, u));
if (client().performRequest(new Request("HEAD", "/" + DataLoader.TEST_INDEX)).getStatusLine().getStatusCode() == 404) {
DataLoader.loadDatasetIntoEs(highLevelClient(), this::createParser);
}

//
@@ -309,8 +302,12 @@ public abstract class RestEqlUsageTestCase extends ESRestTestCase {
}

private void runEql(String eql) throws IOException {
Request request = new Request("POST", DataLoader.testIndexName + "/_eql/search");
Request request = new Request("POST", DataLoader.TEST_INDEX + "/_eql/search");
request.setJsonEntity("{\"query\":\"" + eql +"\"}");
runRequest(request);
}

protected void runRequest(Request request) throws IOException {
client().performRequest(request);
}

@@ -0,0 +1,38 @@
[
{
"@timestamp": "1",
"event_type": "REQUEST",
"transID": 1234,
"sequence": 1
},
{
"@timestamp": "2",
"event_type": "ERROR",
"transID": 1234,
"sequence": 2
},
{
"@timestamp": "3",
"event_type": "STAT",
"transID": 1234,
"sequence": 3
},
{
"@timestamp": "10",
"event_type": "REQUEST",
"transID": 1235,
"sequence": 1
},
{
"@timestamp": "11",
"event_type": "ERROR",
"transID": 1235,
"sequence": 2
},
{
"@timestamp": "11",
"event_type": "STAT",
"transID": 1235,
"sequence": 3
}
]

@@ -0,0 +1,25 @@
{
"properties": {
"@timestamp": {
"type": "date"
},
"event_type": {
"type": "[runtime_random_keyword_type]"
},
"sequence": {
"type": "long"
},
"event": {
"properties": {
"category": {
"type": "alias",
"path": "event_type"
},
"sequence": {
"type": "alias",
"path": "sequence"
}
}
}
}
}

@@ -0,0 +1,29 @@
[[queries]]
name = "basic"
query = '''
sequence by transID
[ REQUEST where true ]
[ ERROR where true ]
[ STAT where true ]
'''
expected_event_ids = [1,2,3]

[[queries]]
name = "basicWithFilter"
query = '''
sequence by transID
[ REQUEST where transID == 1234 ]
[ ERROR where true ]
[ STAT where true ]
'''
expected_event_ids = [1,2,3]

[[queries]]
name = "basicWithFilters"
query = '''
sequence by transID
[ REQUEST where transID == 1234 ]
[ ERROR where transID == 1234 ]
[ STAT where transID == 1234 ]
'''
expected_event_ids = [1,2,3]

@@ -0,0 +1,18 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

package org.elasticsearch.xpack.eql;

import org.elasticsearch.test.eql.EqlExtraSpecTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;

@TestLogging(value = "org.elasticsearch.xpack.eql:TRACE", reason = "results logging")
public class EqlExtraIT extends EqlExtraSpecTestCase {

public EqlExtraIT(String query, String name, long[] eventIds, boolean caseSensitive) {
super(query, name, eventIds, caseSensitive);
}
}

@@ -6,7 +6,7 @@

package org.elasticsearch.xpack.eql;

import org.elasticsearch.test.eql.CommonEqlRestTestCase;
import org.elasticsearch.test.eql.EqlRestTestCase;

public class EqlIT extends CommonEqlRestTestCase {
public class EqlRestIT extends EqlRestTestCase {
}

@@ -6,12 +6,16 @@

package org.elasticsearch.xpack.eql;

import org.elasticsearch.test.eql.CommonEqlActionTestCase;
import org.elasticsearch.test.eql.EqlSpecTestCase;

public class EqlActionIT extends CommonEqlActionTestCase {
public class EqlSpecIT extends EqlSpecTestCase {

public EqlActionIT(String query, String name, long[] eventIds, boolean caseSensitive) {
public EqlSpecIT(String query, String name, long[] eventIds, boolean caseSensitive) {
super(query, name, eventIds, caseSensitive);
}

@Override
protected String sequenceField() {
return "serial_event_id";
}
}

@@ -6,8 +6,8 @@

package org.elasticsearch.xpack.eql;

import org.elasticsearch.test.eql.stats.RestEqlUsageTestCase;
import org.elasticsearch.test.eql.stats.EqlUsageRestTestCase;

public class EqlStatsIT extends RestEqlUsageTestCase {
public class EqlStatsIT extends EqlUsageRestTestCase {

}

@@ -7,16 +7,12 @@
package org.elasticsearch.xpack.eql;

import org.apache.http.util.EntityUtils;
import org.elasticsearch.Build;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -24,33 +20,24 @@ import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.async.AsyncExecutionId;
import org.junit.Before;
import org.junit.BeforeClass;

import java.io.IOException;
import java.util.Map;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField.RUN_AS_USER_HEADER;
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.elasticsearch.xpack.eql.SecurityUtils.secureClientSettings;
import static org.elasticsearch.xpack.eql.SecurityUtils.setRunAsHeader;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;

public class AsyncEqlSecurityIT extends ESRestTestCase {

@BeforeClass
public static void checkForSnapshot() {
assumeTrue("Only works on snapshot builds for now", Build.CURRENT.isSnapshot());
}

/**
* All tests run as a superuser but use <code>es-security-runas-user</code> to become a less privileged user.
*/
@Override
protected Settings restClientSettings() {
String token = basicAuthHeaderValue("test-admin", new SecureString("x-pack-test-password".toCharArray()));
return Settings.builder()
.put(ThreadContext.PREFIX + ".Authorization", token)
.build();
return secureClientSettings();
}

@Before
@@ -166,11 +153,4 @@ public class AsyncEqlSecurityIT extends ESRestTestCase {
static Map<String, Object> toMap(String response) {
return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false);
}

static void setRunAsHeader(Request request, String user) {
final RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder();
builder.addHeader(RUN_AS_USER_HEADER, user);
request.setOptions(builder);
}

}

@@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

package org.elasticsearch.xpack.eql;

import org.elasticsearch.client.Request;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.eql.stats.EqlUsageRestTestCase;

import java.io.IOException;

import static org.elasticsearch.xpack.eql.SecurityUtils.secureClientSettings;

public class EqlStatsIT extends EqlUsageRestTestCase {

/**
* All tests run as a superuser but use <code>es-security-runas-user</code> to become a less privileged user.
*/
@Override
protected Settings restClientSettings() {
return secureClientSettings();
}

@Override
protected void runRequest(Request request) throws IOException {
SecurityUtils.setRunAsHeader(request,"test-admin");
super.runRequest(request);
}
}

@@ -0,0 +1,36 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

package org.elasticsearch.xpack.eql;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;

import static org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField.RUN_AS_USER_HEADER;
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;

public class SecurityUtils {

static Settings secureClientSettings() {
String token = basicAuthHeaderValue("test-admin", new SecureString("x-pack-test-password".toCharArray()));
return Settings.builder()
.put(ThreadContext.PREFIX + ".Authorization", token)
.build();
}

static void setRunAsHeader(Request request, String user) {
final RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder();
builder.addHeader(RUN_AS_USER_HEADER, user);
request.setOptions(builder);
}

static void setUserRole(Request request) {
setRunAsHeader(request, "user1");
}
}

@@ -9,6 +9,10 @@ package org.elasticsearch.xpack.ql;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.ql.expression.Expression;
@@ -24,11 +28,28 @@ import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.NullE
import org.elasticsearch.xpack.ql.session.Configuration;
import org.elasticsearch.xpack.ql.tree.Source;
import org.elasticsearch.xpack.ql.type.DataTypes;
import org.elasticsearch.xpack.ql.util.StringUtils;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;

import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength;
import static org.elasticsearch.test.ESTestCase.randomZone;
@@ -131,4 +152,96 @@ public final class TestUtils {
stats = (Map<String, Object>) stats.get("search");
return (Integer) stats.get("open_contexts");
}

//
// Classpath
//
/**
* Returns the classpath resources matching a simple pattern ("*.csv").
* It supports folders separated by "/" (e.g. "/some/folder/*.txt").
*
* Currently able to resolve resources inside the classpath either from:
* folders in the file-system (typically IDEs) or
* inside jars (gradle).
*/
@SuppressForbidden(reason = "classpath discovery")
public static List<URL> classpathResources(String pattern) throws IOException {
while (pattern.startsWith("/")) {
pattern = pattern.substring(1);
}

Tuple<String, String> split = pathAndName(pattern);

// the root folder searched inside the classpath - default is the root classpath
// default file match
final String root = split.v1();
final String filePattern = split.v2();

String[] resources = System.getProperty("java.class.path").split(System.getProperty("path.separator"));

List<URL> matches = new ArrayList<>();

for (String resource : resources) {
Path path = PathUtils.get(resource);

// check whether we're dealing with a jar
// Java 7 java.nio.fileFileSystem can be used on top of ZIPs/JARs but consumes more memory
// hence the use of the JAR API
if (path.toString().endsWith(".jar")) {
try (JarInputStream jar = jarInputStream(path.toUri().toURL())) {
ZipEntry entry = null;
while ((entry = jar.getNextEntry()) != null) {
String name = entry.getName();
Tuple<String, String> entrySplit = pathAndName(name);
if (root.equals(entrySplit.v1()) && Regex.simpleMatch(filePattern, entrySplit.v2())) {
matches.add(new URL("jar:" + path.toUri() + "!/" + name));
}
}
}
}
// normal file access
else if (Files.isDirectory(path)) {
Files.walkFileTree(path, EnumSet.allOf(FileVisitOption.class), 1, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (Regex.simpleMatch(filePattern, file.toString())) {
matches.add(file.toUri().toURL());
}
return FileVisitResult.CONTINUE;
}
});
}
}
return matches;
}

@SuppressForbidden(reason = "need to open stream")
public static InputStream inputStream(URL resource) throws IOException {
URLConnection con = resource.openConnection();
// do not to cache files (to avoid keeping file handles around)
con.setUseCaches(false);
return con.getInputStream();
}

@SuppressForbidden(reason = "need to open jar")
public static JarInputStream jarInputStream(URL resource) throws IOException {
return new JarInputStream(inputStream(resource));
}

public static BufferedReader reader(URL resource) throws IOException {
return new BufferedReader(new InputStreamReader(inputStream(resource), StandardCharsets.UTF_8));
}

public static Tuple<String, String> pathAndName(String string) {
String folder = StringUtils.EMPTY;
String file = string;
int lastIndexOf = string.lastIndexOf("/");
if (lastIndexOf > 0) {
folder = string.substring(0, lastIndexOf - 1);
if (lastIndexOf + 1 < string.length()) {
file = string.substring(lastIndexOf + 1);
}
}
return new Tuple<>(folder, file);
}
}

@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.qa.jdbc;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.xpack.ql.TestUtils;
import org.elasticsearch.xpack.sql.qa.jdbc.CsvTestUtils.CsvTestCase;

import java.net.URL;
@@ -29,7 +30,7 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase {

@ParametersFactory(argumentFormatting = PARAM_FORMATTING)
public static List<Object[]> readScriptSpec() throws Exception {
List<URL> urls = JdbcTestUtils.classpathResources("/*.csv-spec");
List<URL> urls = TestUtils.classpathResources("/*.csv-spec");
assertTrue("Not enough specs found (" + urls.size() + ") " + urls.toString(), urls.size() >= 23);
return readScriptSpec(urls, specParser());
}

@@ -6,34 +6,14 @@
package org.elasticsearch.xpack.sql.qa.jdbc;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.xpack.sql.action.BasicFormatter;
import org.elasticsearch.xpack.sql.proto.ColumnInfo;
import org.elasticsearch.xpack.sql.proto.StringUtils;

import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.jar.JarInputStream;
import java.util.zip.ZipEntry;

import static org.elasticsearch.xpack.sql.action.BasicFormatter.FormatOption.CLI;

@@ -146,87 +126,4 @@ final class JdbcTestUtils {
BasicFormatter formatter = new BasicFormatter(cols, data, CLI);
logger.info("\n" + formatter.formatWithHeader(cols, data));
}

static String of(long millis, String zoneId) {
return StringUtils.toString(ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.of(zoneId)));
}

/**
* Returns the classpath resources matching a simple pattern ("*.csv").
* It supports folders separated by "/" (e.g. "/some/folder/*.txt").
*
* Currently able to resolve resources inside the classpath either from:
* folders in the file-system (typically IDEs) or
* inside jars (gradle).
*/
static List<URL> classpathResources(String pattern) throws Exception {
while (pattern.startsWith("/")) {
pattern = pattern.substring(1);
}

Tuple<String, String> split = pathAndName(pattern);

// the root folder searched inside the classpath - default is the root classpath
// default file match
final String root = split.v1();
final String filePattern = split.v2();

String[] resources = System.getProperty("java.class.path").split(System.getProperty("path.separator"));

List<URL> matches = new ArrayList<>();

for (String resource : resources) {
Path path = PathUtils.get(resource);

// check whether we're dealing with a jar
// Java 7 java.nio.fileFileSystem can be used on top of ZIPs/JARs but consumes more memory
// hence the use of the JAR API
if (path.toString().endsWith(".jar")) {
try (JarInputStream jar = getJarStream(path.toUri().toURL())) {
ZipEntry entry = null;
while ((entry = jar.getNextEntry()) != null) {
String name = entry.getName();
Tuple<String, String> entrySplit = pathAndName(name);
if (root.equals(entrySplit.v1()) && Regex.simpleMatch(filePattern, entrySplit.v2())) {
matches.add(new URL("jar:" + path.toUri() + "!/" + name));
}
}
}
}
// normal file access
else if (Files.isDirectory(path)) {
Files.walkFileTree(path, EnumSet.allOf(FileVisitOption.class), 1, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (Regex.simpleMatch(filePattern, file.toString())) {
matches.add(file.toUri().toURL());
}
return FileVisitResult.CONTINUE;
}
});
}
}
return matches;
}

@SuppressForbidden(reason = "need to open jar")
private static JarInputStream getJarStream(URL resource) throws IOException {
URLConnection con = resource.openConnection();
// do not to cache files (to avoid keeping file handles around)
con.setUseCaches(false);
return new JarInputStream(con.getInputStream());
}

static Tuple<String, String> pathAndName(String string) {
String folder = StringUtils.EMPTY;
String file = string;
int lastIndexOf = string.lastIndexOf("/");
if (lastIndexOf > 0) {
folder = string.substring(0, lastIndexOf - 1);
if (lastIndexOf + 1 < string.length()) {
file = string.substring(lastIndexOf + 1);
}
}
return new Tuple<>(folder, file);
}
}

@@ -10,17 +10,13 @@ import org.elasticsearch.client.Request;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.xpack.ql.TestUtils;
import org.junit.AfterClass;
import org.junit.Before;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
@@ -33,6 +29,7 @@ import java.util.Objects;
import java.util.Properties;

import static java.util.Collections.emptyList;
import static org.elasticsearch.xpack.ql.TestUtils.pathAndName;
import static org.elasticsearch.xpack.sql.qa.jdbc.JdbcTestUtils.JDBC_TIMEZONE;

/**
@@ -168,14 +165,14 @@ public abstract class SpecBaseIntegrationTestCase extends JdbcIntegrationTestCas
}

private static List<Object[]> readURLSpec(URL source, Parser parser) throws Exception {
String fileName = JdbcTestUtils.pathAndName(source.getFile()).v2();
String fileName = pathAndName(source.getFile()).v2();
String groupName = fileName.substring(0, fileName.lastIndexOf("."));

Map<String, Integer> testNames = new LinkedHashMap<>();
List<Object[]> testCases = new ArrayList<>();

String testName = null;
try (BufferedReader reader = new BufferedReader(new InputStreamReader(readFromJarUrl(source), StandardCharsets.UTF_8))) {
try (BufferedReader reader = TestUtils.reader(source)) {
String line;
int lineNumber = 1;
while ((line = reader.readLine()) != null) {
@@ -221,12 +218,4 @@ public abstract class SpecBaseIntegrationTestCas
public interface Parser {
Object parse(String line);
}

@SuppressForbidden(reason = "test reads from jar")
public static InputStream readFromJarUrl(URL source) throws IOException {
URLConnection con = source.openConnection();
// do not to cache files (to avoid keeping file handles around)
con.setUseCaches(false);
return con.getInputStream();
}
}

@@ -6,7 +6,6 @@
package org.elasticsearch.xpack.sql.qa.jdbc;

import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.junit.Assume;
import org.junit.ClassRule;

@@ -20,6 +19,8 @@ import java.util.List;
import java.util.Locale;
import java.util.TimeZone;

import static org.elasticsearch.xpack.ql.TestUtils.classpathResources;

/**
* Tests comparing sql queries executed against our jdbc client
* with those executed against H2's jdbc client.
@@ -33,7 +34,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {

@ParametersFactory(argumentFormatting = PARAM_FORMATTING)
public static List<Object[]> readScriptSpec() throws Exception {
List<URL> urls = JdbcTestUtils.classpathResources("/*.sql-spec");
List<URL> urls = classpathResources("/*.sql-spec");
assertTrue("Not enough specs found " + urls.toString(), urls.size() > 10);
return readScriptSpec(urls, specParser());
}