[PURIFY] remove all trace of x-pack eql (#5)

This commit removes all trace of EQL (x-pack's Event Query Language) from the sanitized fork.

Signed-off-by: Peter Nied <petern@amazon.com>
Authored by Nick Knize on 2021-01-29 23:28:01 -06:00; committed by Peter Nied
parent 55b8ab4338
commit 168f5e825f
16 changed files with 0 additions and 1667 deletions

View File

@@ -10,7 +10,6 @@
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessLexer\.java" checks="." />
<suppress files="modules[/\\]lang-painless[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]painless[/\\]antlr[/\\]PainlessParser(|BaseVisitor|Visitor)\.java" checks="." />
<suppress files="plugin[/\\]sql[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]xpack[/\\]sql[/\\]parser[/\\]SqlBase(Base(Listener|Visitor)|Lexer|Listener|Parser|Visitor).java" checks="." />
<suppress files="plugin[/\\]eql[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]xpack[/\\]eql[/\\]parser[/\\]EqlBase(Base(Listener|Visitor)|Lexer|Listener|Parser|Visitor).java" checks="." />
<!-- JNA requires the no-argument constructor on JNAKernel32Library.SizeT to be public-->
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]bootstrap[/\\]JNAKernel32Library.java" checks="RedundantModifier" />

View File

@@ -1,128 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.eql.EqlSearchRequest;
import org.elasticsearch.client.eql.EqlSearchResponse;
import org.elasticsearch.client.eql.EqlStatsRequest;
import org.elasticsearch.client.eql.EqlStatsResponse;
import java.io.IOException;
import java.util.Collections;
/**
* A wrapper for the {@link RestHighLevelClient} that provides methods for
* accessing the Elastic EQL related functions
* <p>
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/eql.html">
* EQL APIs on elastic.co</a> for more information.
*/
public final class EqlClient {
private final RestHighLevelClient restHighLevelClient;
EqlClient(RestHighLevelClient restHighLevelClient) {
this.restHighLevelClient = restHighLevelClient;
}
/**
* Executes the eql search query.
* <p>
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search.html">
* the docs</a> for more.
*
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public EqlSearchResponse search(EqlSearchRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
request,
EqlRequestConverters::search,
options,
EqlSearchResponse::fromXContent,
Collections.emptySet()
);
}
/**
* Asynchronously executes the eql search query.
* <p>
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search.html">
* the docs</a> for more.
*
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable searchAsync(EqlSearchRequest request,
RequestOptions options,
ActionListener<EqlSearchResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(
request,
EqlRequestConverters::search,
options,
EqlSearchResponse::fromXContent,
listener,
Collections.emptySet()
);
}
/**
* Get the eql stats
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-stats.html">
* the docs</a> for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @return the response
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public EqlStatsResponse stats(EqlStatsRequest request, RequestOptions options) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(
request,
EqlRequestConverters::stats,
options,
EqlStatsResponse::fromXContent,
Collections.emptySet()
);
}
/**
* Asynchronously get the eql stats
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-stats.html">
* the docs</a> for more.
* @param request the request
* @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
* @param listener the listener to be notified upon request completion
* @return cancellable that may be used to cancel the request
*/
public Cancellable statsAsync(EqlStatsRequest request, RequestOptions options, ActionListener<EqlStatsResponse> listener) {
return restHighLevelClient.performRequestAsyncAndParseEntity(request,
EqlRequestConverters::stats,
options,
EqlStatsResponse::fromXContent,
listener,
Collections.emptySet()
);
}
}
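
For context on how the removed wrapper was meant to be used, here is a minimal usage sketch; it is not part of the removed sources. The client construction, host/port, and the "my-index" name are illustrative assumptions, and the query string is the same trivial "process where true" used by the EqlIT integration test later in this commit.

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.eql.EqlSearchRequest;
import org.elasticsearch.client.eql.EqlSearchResponse;

public class EqlClientUsageSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative client construction; host and port are assumptions.
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            EqlSearchRequest request = new EqlSearchRequest("my-index", "process where true");
            EqlSearchResponse response = client.eql().search(request, RequestOptions.DEFAULT);
            if (response.hits().events() != null) {
                for (EqlSearchResponse.Event event : response.hits().events()) {
                    System.out.println(event.sourceAsMap()); // each hit's _source as a Map
                }
            }
        }
    }
}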

View File

@@ -1,52 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.client.eql.EqlSearchRequest;
import org.elasticsearch.client.eql.EqlStatsRequest;
import java.io.IOException;
import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE;
import static org.elasticsearch.client.RequestConverters.createEntity;
final class EqlRequestConverters {
static Request search(EqlSearchRequest eqlSearchRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addCommaSeparatedPathParts(eqlSearchRequest.indices())
.addPathPartAsIs("_eql", "search")
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
RequestConverters.Params parameters = new RequestConverters.Params();
parameters.withIndicesOptions(eqlSearchRequest.indicesOptions());
request.setEntity(createEntity(eqlSearchRequest, REQUEST_BODY_CONTENT_TYPE));
request.addParameters(parameters.asMap());
return request;
}
static Request stats(EqlStatsRequest eqlStatsRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_eql", "stats")
.build();
return new Request(HttpGet.METHOD_NAME, endpoint);
}
}

View File

@@ -272,7 +272,6 @@ public class RestHighLevelClient implements Closeable {
private final CcrClient ccrClient = new CcrClient(this);
private final TransformClient transformClient = new TransformClient(this);
private final EnrichClient enrichClient = new EnrichClient(this);
private final EqlClient eqlClient = new EqlClient(this);
private final AsyncSearchClient asyncSearchClient = new AsyncSearchClient(this);
/**
@@ -512,20 +511,6 @@ public class RestHighLevelClient implements Closeable {
return enrichClient;
}
/**
* Provides methods for accessing the Elastic EQL APIs that
* are shipped with the Elastic Stack distribution of Elasticsearch. All of
* these APIs will 404 if run against the OSS distribution of Elasticsearch.
* <p>
* See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/eql.html">
* EQL APIs on elastic.co</a> for more information.
*
* @return the client wrapper for making EQL API calls
*/
public final EqlClient eql() {
return eqlClient;
}
/**
* Executes a bulk request using the Bulk API.
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html">Bulk API on elastic.co</a>

View File

@@ -1,275 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.eql;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;
public class EqlSearchRequest implements Validatable, ToXContentObject {
private String[] indices;
private IndicesOptions indicesOptions = IndicesOptions.fromOptions(true, true, true, false);
private QueryBuilder filter = null;
private String timestampField = "@timestamp";
private String eventCategoryField = "event.category";
private String resultPosition = "head";
private int size = 10;
private int fetchSize = 1000;
private String query;
private String tiebreakerField;
// Async settings
private TimeValue waitForCompletionTimeout;
private boolean keepOnCompletion;
private TimeValue keepAlive;
static final String KEY_FILTER = "filter";
static final String KEY_TIMESTAMP_FIELD = "timestamp_field";
static final String KEY_TIEBREAKER_FIELD = "tiebreaker_field";
static final String KEY_EVENT_CATEGORY_FIELD = "event_category_field";
static final String KEY_SIZE = "size";
static final String KEY_FETCH_SIZE = "fetch_size";
static final String KEY_QUERY = "query";
static final String KEY_RESULT_POSITION = "result_position";
static final String KEY_WAIT_FOR_COMPLETION_TIMEOUT = "wait_for_completion_timeout";
static final String KEY_KEEP_ALIVE = "keep_alive";
static final String KEY_KEEP_ON_COMPLETION = "keep_on_completion";
public EqlSearchRequest(String indices, String query) {
indices(indices);
query(query);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
if (filter != null) {
builder.field(KEY_FILTER, filter);
}
builder.field(KEY_TIMESTAMP_FIELD, timestampField());
if (tiebreakerField != null) {
builder.field(KEY_TIEBREAKER_FIELD, tiebreakerField());
}
builder.field(KEY_EVENT_CATEGORY_FIELD, eventCategoryField());
builder.field(KEY_SIZE, size());
builder.field(KEY_FETCH_SIZE, fetchSize());
builder.field(KEY_RESULT_POSITION, resultPosition());
builder.field(KEY_QUERY, query);
if (waitForCompletionTimeout != null) {
builder.field(KEY_WAIT_FOR_COMPLETION_TIMEOUT, waitForCompletionTimeout);
}
if (keepAlive != null) {
builder.field(KEY_KEEP_ALIVE, keepAlive);
}
builder.field(KEY_KEEP_ON_COMPLETION, keepOnCompletion);
builder.endObject();
return builder;
}
public EqlSearchRequest indices(String... indices) {
Objects.requireNonNull(indices, "indices must not be null");
for (String index : indices) {
Objects.requireNonNull(index, "index must not be null");
}
this.indices = indices;
return this;
}
public QueryBuilder filter() {
return this.filter;
}
public EqlSearchRequest filter(QueryBuilder filter) {
this.filter = filter;
return this;
}
public String timestampField() {
return this.timestampField;
}
public EqlSearchRequest timestampField(String timestampField) {
Objects.requireNonNull(timestampField, "timestamp field must not be null");
this.timestampField = timestampField;
return this;
}
public String tiebreakerField() {
return this.tiebreakerField;
}
public EqlSearchRequest tiebreakerField(String tiebreakerField) {
Objects.requireNonNull(tiebreakerField, "tiebreaker field must not be null");
this.tiebreakerField = tiebreakerField;
return this;
}
public String eventCategoryField() {
return this.eventCategoryField;
}
public EqlSearchRequest eventCategoryField(String eventCategoryField) {
Objects.requireNonNull(eventCategoryField, "event category field must not be null");
this.eventCategoryField = eventCategoryField;
return this;
}
public String resultPosition() {
return resultPosition;
}
public EqlSearchRequest resultPosition(String position) {
if ("head".equals(position) || "tail".equals(position)) {
resultPosition = position;
} else {
throw new IllegalArgumentException("result position needs to be 'head' or 'tail', received '" + position + "'");
}
return this;
}
public int size() {
return this.size;
}
public EqlSearchRequest size(int size) {
if (size < 0) {
throw new IllegalArgumentException("size must be greater than or equal to 0");
}
this.size = size;
return this;
}
public int fetchSize() {
return this.fetchSize;
}
public EqlSearchRequest fetchSize(int fetchSize) {
if (fetchSize < 2) {
throw new IllegalArgumentException("fetch size must be greater than 1");
}
this.fetchSize = fetchSize;
return this;
}
public String query() {
return this.query;
}
public EqlSearchRequest query(String query) {
Objects.requireNonNull(query, "query must not be null");
this.query = query;
return this;
}
public TimeValue waitForCompletionTimeout() {
return waitForCompletionTimeout;
}
public EqlSearchRequest waitForCompletionTimeout(TimeValue waitForCompletionTimeout) {
this.waitForCompletionTimeout = waitForCompletionTimeout;
return this;
}
public Boolean keepOnCompletion() {
return keepOnCompletion;
}
public void keepOnCompletion(Boolean keepOnCompletion) {
this.keepOnCompletion = keepOnCompletion;
}
public TimeValue keepAlive() {
return keepAlive;
}
public EqlSearchRequest keepAlive(TimeValue keepAlive) {
this.keepAlive = keepAlive;
return this;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EqlSearchRequest that = (EqlSearchRequest) o;
return size == that.size &&
fetchSize == that.fetchSize &&
Objects.equals(resultPosition, that.resultPosition) &&
Arrays.equals(indices, that.indices) &&
Objects.equals(indicesOptions, that.indicesOptions) &&
Objects.equals(filter, that.filter) &&
Objects.equals(timestampField, that.timestampField) &&
Objects.equals(tiebreakerField, that.tiebreakerField) &&
Objects.equals(eventCategoryField, that.eventCategoryField) &&
Objects.equals(query, that.query) &&
Objects.equals(waitForCompletionTimeout, that.waitForCompletionTimeout) &&
Objects.equals(keepAlive, that.keepAlive) &&
Objects.equals(keepOnCompletion, that.keepOnCompletion);
}
@Override
public int hashCode() {
return Objects.hash(
Arrays.hashCode(indices),
indicesOptions,
filter,
size,
fetchSize,
timestampField,
tiebreakerField,
eventCategoryField,
query,
resultPosition,
waitForCompletionTimeout,
keepAlive,
keepOnCompletion);
}
public String[] indices() {
return Arrays.copyOf(this.indices, this.indices.length);
}
public EqlSearchRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = Objects.requireNonNull(indicesOptions, "indicesOptions must not be null");
return this;
}
public IndicesOptions indicesOptions() {
return indicesOptions;
}
}
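
Because the request class above does its own serialization in toXContent, the JSON body that would have been sent to the _eql/search endpoint can be previewed by rendering a request through the standard XContent helpers. A minimal sketch, not part of the removed sources; it assumes org.elasticsearch.common.Strings is on the classpath and uses placeholder index and query values.

import org.elasticsearch.client.eql.EqlSearchRequest;
import org.elasticsearch.common.Strings;

public class EqlSearchRequestBodySketch {
    public static void main(String[] args) {
        EqlSearchRequest request = new EqlSearchRequest("my-index", "process where true")
            .size(5)
            .fetchSize(100);
        // With the defaults above this prints roughly:
        // {"timestamp_field":"@timestamp","event_category_field":"event.category",
        //  "size":5,"fetch_size":100,"result_position":"head",
        //  "query":"process where true","keep_on_completion":false}
        System.out.println(Strings.toString(request));
    }
}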

View File

@@ -1,369 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.eql;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.InstantiatingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
public class EqlSearchResponse {
private final Hits hits;
private final long tookInMillis;
private final boolean isTimeout;
private final String asyncExecutionId;
private final boolean isRunning;
private final boolean isPartial;
private static final class Fields {
static final String TOOK = "took";
static final String TIMED_OUT = "timed_out";
static final String HITS = "hits";
static final String ID = "id";
static final String IS_RUNNING = "is_running";
static final String IS_PARTIAL = "is_partial";
}
private static final ParseField TOOK = new ParseField(Fields.TOOK);
private static final ParseField TIMED_OUT = new ParseField(Fields.TIMED_OUT);
private static final ParseField HITS = new ParseField(Fields.HITS);
private static final ParseField ID = new ParseField(Fields.ID);
private static final ParseField IS_RUNNING = new ParseField(Fields.IS_RUNNING);
private static final ParseField IS_PARTIAL = new ParseField(Fields.IS_PARTIAL);
private static final InstantiatingObjectParser<EqlSearchResponse, Void> PARSER;
static {
InstantiatingObjectParser.Builder<EqlSearchResponse, Void> parser =
InstantiatingObjectParser.builder("eql/search_response", true, EqlSearchResponse.class);
parser.declareObject(constructorArg(), (p, c) -> Hits.fromXContent(p), HITS);
parser.declareLong(constructorArg(), TOOK);
parser.declareBoolean(constructorArg(), TIMED_OUT);
parser.declareString(optionalConstructorArg(), ID);
parser.declareBoolean(constructorArg(), IS_RUNNING);
parser.declareBoolean(constructorArg(), IS_PARTIAL);
PARSER = parser.build();
}
public EqlSearchResponse(Hits hits, long tookInMillis, boolean isTimeout, String asyncExecutionId,
boolean isRunning, boolean isPartial) {
super();
this.hits = hits == null ? Hits.EMPTY : hits;
this.tookInMillis = tookInMillis;
this.isTimeout = isTimeout;
this.asyncExecutionId = asyncExecutionId;
this.isRunning = isRunning;
this.isPartial = isPartial;
}
public static EqlSearchResponse fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
public long took() {
return tookInMillis;
}
public boolean isTimeout() {
return isTimeout;
}
public Hits hits() {
return hits;
}
public String id() {
return asyncExecutionId;
}
public boolean isRunning() {
return isRunning;
}
public boolean isPartial() {
return isPartial;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EqlSearchResponse that = (EqlSearchResponse) o;
return Objects.equals(hits, that.hits)
&& Objects.equals(tookInMillis, that.tookInMillis)
&& Objects.equals(isTimeout, that.isTimeout);
}
@Override
public int hashCode() {
return Objects.hash(hits, tookInMillis, isTimeout);
}
// Event
public static class Event {
private static final class Fields {
static final String INDEX = GetResult._INDEX;
static final String ID = GetResult._ID;
static final String SOURCE = SourceFieldMapper.NAME;
}
private static final ParseField INDEX = new ParseField(Fields.INDEX);
private static final ParseField ID = new ParseField(Fields.ID);
private static final ParseField SOURCE = new ParseField(Fields.SOURCE);
private static final ConstructingObjectParser<Event, Void> PARSER =
new ConstructingObjectParser<>("eql/search_response_event", true,
args -> new Event((String) args[0], (String) args[1], (BytesReference) args[2]));
static {
PARSER.declareString(constructorArg(), INDEX);
PARSER.declareString(constructorArg(), ID);
PARSER.declareObject(constructorArg(), (p, c) -> {
try (XContentBuilder builder = XContentBuilder.builder(p.contentType().xContent())) {
builder.copyCurrentStructure(p);
return BytesReference.bytes(builder);
}
}, SOURCE);
}
private final String index;
private final String id;
private final BytesReference source;
private Map<String, Object> sourceAsMap;
public Event(String index, String id, BytesReference source) {
this.index = index;
this.id = id;
this.source = source;
}
public static Event fromXContent(XContentParser parser) throws IOException {
return PARSER.apply(parser, null);
}
public String index() {
return index;
}
public String id() {
return id;
}
public BytesReference source() {
return source;
}
public Map<String, Object> sourceAsMap() {
if (source == null) {
return null;
}
if (sourceAsMap != null) {
return sourceAsMap;
}
sourceAsMap = SourceLookup.sourceAsMap(source);
return sourceAsMap;
}
@Override
public int hashCode() {
return Objects.hash(index, id, source);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
EqlSearchResponse.Event other = (EqlSearchResponse.Event) obj;
return Objects.equals(index, other.index)
&& Objects.equals(id, other.id)
&& Objects.equals(source, other.source);
}
}
// Sequence
public static class Sequence {
private static final class Fields {
static final String JOIN_KEYS = "join_keys";
static final String EVENTS = "events";
}
private static final ParseField JOIN_KEYS = new ParseField(Fields.JOIN_KEYS);
private static final ParseField EVENTS = new ParseField(Fields.EVENTS);
private static final ConstructingObjectParser<EqlSearchResponse.Sequence, Void> PARSER =
new ConstructingObjectParser<>("eql/search_response_sequence", true,
args -> {
int i = 0;
@SuppressWarnings("unchecked") List<Object> joinKeys = (List<Object>) args[i++];
@SuppressWarnings("unchecked") List<Event> events = (List<Event>) args[i];
return new EqlSearchResponse.Sequence(joinKeys, events);
});
static {
PARSER.declareFieldArray(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> XContentParserUtils.parseFieldsValue(p),
JOIN_KEYS, ObjectParser.ValueType.VALUE_ARRAY);
PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> Event.fromXContent(p), EVENTS);
}
private final List<Object> joinKeys;
private final List<Event> events;
public Sequence(List<Object> joinKeys, List<Event> events) {
this.joinKeys = joinKeys == null ? Collections.emptyList() : joinKeys;
this.events = events == null ? Collections.emptyList() : events;
}
public static Sequence fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
public List<Object> joinKeys() {
return joinKeys;
}
public List<Event> events() {
return events;
}
@Override
public int hashCode() {
return Objects.hash(joinKeys, events);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Sequence that = (Sequence) o;
return Objects.equals(joinKeys, that.joinKeys)
&& Objects.equals(events, that.events);
}
}
// Hits
public static class Hits {
public static final Hits EMPTY = new Hits(null, null, null);
private final List<Event> events;
private final List<Sequence> sequences;
private final TotalHits totalHits;
private static final class Fields {
static final String TOTAL = "total";
static final String EVENTS = "events";
static final String SEQUENCES = "sequences";
}
public Hits(@Nullable List<Event> events, @Nullable List<Sequence> sequences, @Nullable TotalHits totalHits) {
this.events = events;
this.sequences = sequences;
this.totalHits = totalHits;
}
private static final ConstructingObjectParser<EqlSearchResponse.Hits, Void> PARSER =
new ConstructingObjectParser<>("eql/search_response_hits", true,
args -> {
int i = 0;
@SuppressWarnings("unchecked") List<Event> events = (List<Event>) args[i++];
@SuppressWarnings("unchecked") List<Sequence> sequences = (List<Sequence>) args[i++];
TotalHits totalHits = (TotalHits) args[i];
return new EqlSearchResponse.Hits(events, sequences, totalHits);
});
static {
PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> Event.fromXContent(p),
new ParseField(Fields.EVENTS));
PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), Sequence.PARSER,
new ParseField(Fields.SEQUENCES));
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> SearchHits.parseTotalHitsFragment(p),
new ParseField(Fields.TOTAL));
}
public static Hits fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public List<Event> events() {
return this.events;
}
public List<Sequence> sequences() {
return this.sequences;
}
public TotalHits totalHits() {
return this.totalHits;
}
@Override
public int hashCode() {
return Objects.hash(events, sequences, totalHits);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Hits that = (Hits) o;
return Objects.equals(events, that.events)
&& Objects.equals(sequences, that.sequences)
&& Objects.equals(totalHits, that.totalHits);
}
}
}
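
A response parsed by the class above typically carries either flat event hits or sequence hits, and the parsers leave the unused list null. A short sketch of walking both shapes; the EqlSearchResponse is assumed to come from EqlClient#search, as in the earlier sketch.

import org.elasticsearch.client.eql.EqlSearchResponse;

public class EqlSearchResponseWalkSketch {
    // "response" is assumed to have been obtained via EqlClient#search.
    static void printHits(EqlSearchResponse response) {
        EqlSearchResponse.Hits hits = response.hits();
        if (hits.events() != null) {
            // Flat event query: each hit is a single document.
            for (EqlSearchResponse.Event event : hits.events()) {
                System.out.println(event.index() + "/" + event.id() + " -> " + event.sourceAsMap());
            }
        } else if (hits.sequences() != null) {
            // Sequence query: hits are grouped by their join keys.
            for (EqlSearchResponse.Sequence sequence : hits.sequences()) {
                System.out.println("join keys " + sequence.joinKeys() + ": "
                    + sequence.events().size() + " events");
            }
        }
    }
}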

View File

@@ -1,24 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.eql;
import org.elasticsearch.client.Validatable;
public final class EqlStatsRequest implements Validatable {
}

View File

@@ -1,121 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.eql;
import org.elasticsearch.client.NodesResponseHeader;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;
public class EqlStatsResponse {
private final NodesResponseHeader header;
private final String clusterName;
private final List<Node> nodes;
public EqlStatsResponse(NodesResponseHeader header, String clusterName, List<Node> nodes) {
this.header = header;
this.clusterName = clusterName;
this.nodes = nodes;
}
@SuppressWarnings("unchecked")
private static final ConstructingObjectParser<EqlStatsResponse, Void>
PARSER = new ConstructingObjectParser<>("eql/stats_response", true, args -> {
int i = 0;
NodesResponseHeader header = (NodesResponseHeader) args[i++];
String clusterName = (String) args[i++];
List<Node> nodes = (List<Node>) args[i];
return new EqlStatsResponse(header, clusterName, nodes);
});
static {
PARSER.declareObject(ConstructingObjectParser.constructorArg(), NodesResponseHeader::fromXContent, new ParseField("_nodes"));
PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("cluster_name"));
PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(),
(p, c) -> EqlStatsResponse.Node.PARSER.apply(p, null),
new ParseField("stats"));
}
public static EqlStatsResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public NodesResponseHeader getHeader() {
return header;
}
public List<Node> getNodes() {
return nodes;
}
public String getClusterName() {
return clusterName;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
EqlStatsResponse that = (EqlStatsResponse) o;
return Objects.equals(nodes, that.nodes) && Objects.equals(header, that.header) && Objects.equals(clusterName, that.clusterName);
}
@Override
public int hashCode() {
return Objects.hash(nodes, header, clusterName);
}
public static class Node {
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<Node, Void>
PARSER = new ConstructingObjectParser<>("eql/stats_response_node", true, (args, c) -> new Node((Map<String, Object>) args[0]));
static {
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> p.map(), new ParseField("stats"));
}
private Map<String, Object> stats;
public Node(Map<String, Object> stats) {
this.stats = stats;
}
public Map<String, Object> getStats() {
return stats;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Node node = (Node) o;
return Objects.equals(stats, node.stats);
}
@Override
public int hashCode() {
return Objects.hash(stats);
}
}
}
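
The stats classes above mirror the _eql/stats payload: a standard nodes header, the cluster name, and one opaque stats map per node. A minimal sketch of dumping those maps, assuming a configured RestHighLevelClient as in the first sketch.

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.eql.EqlStatsRequest;
import org.elasticsearch.client.eql.EqlStatsResponse;

public class EqlStatsSketch {
    static void printStats(RestHighLevelClient client) throws Exception {
        EqlStatsResponse stats = client.eql().stats(new EqlStatsRequest(), RequestOptions.DEFAULT);
        System.out.println("cluster: " + stats.getClusterName());
        for (EqlStatsResponse.Node node : stats.getNodes()) {
            System.out.println(node.getStats()); // the raw per-node "stats" map, as parsed above
        }
    }
}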

View File

@@ -1,201 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.eql.EqlSearchRequest;
import org.elasticsearch.client.eql.EqlSearchResponse;
import org.elasticsearch.client.eql.EqlSearchResponse.Event;
import org.elasticsearch.client.eql.EqlStatsRequest;
import org.elasticsearch.client.eql.EqlStatsResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.rest.RestStatus;
import org.junit.Before;
import java.io.IOException;
import java.time.format.DateTimeFormatter;
import java.util.Locale;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
public class EqlIT extends ESRestHighLevelClientTestCase {
private static final String INDEX_NAME = "index";
private static final int RECORD_COUNT = 40;
private static final int DIVIDER = 4;
@Before
public void setup() throws Exception {
setupRemoteClusterConfig("local_cluster");
setupData();
}
private void setupData() throws IOException {
final BulkRequest bulkRequest = new BulkRequest();
bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int i = 0; i < RECORD_COUNT; i++) {
final IndexRequest indexRequest = new IndexRequest(INDEX_NAME);
indexRequest.source(jsonBuilder()
.startObject()
.field("event_subtype_full", "already_running")
.startObject("event")
.field("category", "process")
.endObject()
.field("event_type", "foo")
.field("event_type_full", "process_event")
.field("opcode", ((i % DIVIDER) == 0) ? 1 : 0)
.field("pid", ((i % DIVIDER) == 0) ? 100 : 0)
.field("process_name", "System Idle Process")
.field("serial_event_id", i + 1)
.field("subtype", "create")
.field("@timestamp", String.format(Locale.ROOT, "2018-01-01T00:00:%02dZ", i))
.field("unique_pid", ((i % DIVIDER) == 0) ? 101 : 0)
.endObject());
bulkRequest.add(indexRequest);
}
BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
assertEquals(RestStatus.OK, bulkResponse.status());
assertFalse(bulkResponse.hasFailures());
RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest(INDEX_NAME), RequestOptions.DEFAULT);
assertEquals(0, refreshResponse.getFailedShards());
}
private void assertResponse(EqlSearchResponse response, int count) {
assertNotNull(response);
assertFalse(response.isTimeout());
assertNotNull(response.hits());
assertNull(response.hits().sequences());
assertNotNull(response.hits().events());
assertThat(response.hits().events().size(), equalTo(count));
}
public void testBasicSearch() throws Exception {
EqlClient eql = highLevelClient().eql();
EqlSearchRequest request = new EqlSearchRequest("index", "process where true").size(RECORD_COUNT);
assertResponse(execute(request, eql::search, eql::searchAsync), RECORD_COUNT);
}
@SuppressWarnings("unchecked")
public void testSimpleConditionSearch() throws Exception {
EqlClient eql = highLevelClient().eql();
// test simple conditional
EqlSearchRequest request = new EqlSearchRequest("index", "foo where pid > 0");
// test with non-default event.category mapping
request.eventCategoryField("event_type").size(RECORD_COUNT);
EqlSearchResponse response = execute(request, eql::search, eql::searchAsync);
assertResponse(response, RECORD_COUNT / DIVIDER);
// test the content of the hits
for (Event hit : response.hits().events()) {
final Map<String, Object> source = hit.sourceAsMap();
final Map<String, Object> event = (Map<String, Object>) source.get("event");
assertThat(event.get("category"), equalTo("process"));
assertThat(source.get("event_type"), equalTo("foo"));
assertThat(source.get("event_type_full"), equalTo("process_event"));
assertThat(source.get("opcode"), equalTo(1));
assertThat(source.get("pid"), equalTo(100));
assertThat(source.get("process_name"), equalTo("System Idle Process"));
assertThat((int) source.get("serial_event_id"), greaterThan(0));
assertThat(source.get("unique_pid"), equalTo(101));
}
}
@SuppressWarnings("unchecked")
public void testEqualsInFilterConditionSearch() throws Exception {
EqlClient eql = highLevelClient().eql();
EqlSearchRequest request = new EqlSearchRequest("index",
"process where event_type_full == \"process_event\" and serial_event_id in (1,3,5)");
EqlSearchResponse response = execute(request, eql::search, eql::searchAsync);
assertResponse(response, 3);
// test the content of the hits
for (Event hit : response.hits().events()) {
final Map<String, Object> source = hit.sourceAsMap();
final Map<String, Object> event = (Map<String, Object>) source.get("event");
assertThat(event.get("category"), equalTo("process"));
assertThat(source.get("serial_event_id"), anyOf(equalTo(1), equalTo(3), equalTo(5)));
}
}
public void testLargeMapping() throws Exception {
final String index = "large_mapping_index";
Request doc1 = new Request(HttpPut.METHOD_NAME, "/" + index + "/_doc/1");
// use more exact fields (dates) than the default to verify that retrieval works and requesting doc values
// would fail
int PASS_DEFAULT_DOC_VALUES = IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING.get(Settings.EMPTY) + 50;
String now = DateUtils.nowWithMillisResolution().format(DateTimeFormatter.ISO_DATE_TIME);
StringBuilder sb = new StringBuilder();
sb.append("{");
for (int i = 0; i < PASS_DEFAULT_DOC_VALUES; i++) {
sb.append("\"datetime" + i + "\":\"" + now + "\"");
sb.append(",");
}
sb.append("\"event\": {\"category\": \"process\"},");
sb.append("\"@timestamp\": \"2020-02-03T12:34:56Z\",");
sb.append("\"serial_event_id\": 1");
sb.append("}");
doc1.setJsonEntity(sb.toString());
client().performRequest(doc1);
client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
EqlClient eql = highLevelClient().eql();
EqlSearchRequest request = new EqlSearchRequest(index, "process where true");
EqlSearchResponse response = execute(request, eql::search, eql::searchAsync);
assertNotNull(response);
assertNotNull(response.hits());
assertThat(response.hits().events().size(), equalTo(1));
}
// Basic test for stats
// TODO: add more tests once the stats are hooked up
public void testStats() throws Exception {
EqlClient eql = highLevelClient().eql();
EqlStatsRequest request = new EqlStatsRequest();
EqlStatsResponse response = execute(request, eql::stats, eql::statsAsync);
assertNotNull(response);
assertNotNull(response.getHeader());
assertThat(response.getHeader().getTotal(), greaterThan(0));
assertThat(response.getNodes().size(), greaterThan(0));
}
}

View File

@@ -919,7 +919,6 @@ public class RestHighLevelClientTests extends ESTestCase {
apiName.startsWith("ccr.") == false &&
apiName.startsWith("enrich.") == false &&
apiName.startsWith("transform.") == false &&
apiName.startsWith("eql.") == false &&
apiName.endsWith("freeze") == false &&
apiName.endsWith("reload_analyzers") == false &&
apiName.startsWith("async_search") == false &&

View File

@@ -1,90 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.eql;
import org.elasticsearch.client.AbstractRequestTestCase;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchModule;
import java.io.IOException;
import java.util.Collections;
import static org.hamcrest.Matchers.equalTo;
public class EqlSearchRequestTests extends AbstractRequestTestCase<EqlSearchRequest, org.elasticsearch.xpack.eql.action.EqlSearchRequest> {
@Override
protected EqlSearchRequest createClientTestInstance() {
EqlSearchRequest eqlSearchRequest = new EqlSearchRequest("testindex", randomAlphaOfLength(40));
if (randomBoolean()) {
eqlSearchRequest.fetchSize(randomIntBetween(1, Integer.MAX_VALUE));
}
if (randomBoolean()) {
eqlSearchRequest.size(randomInt(Integer.MAX_VALUE));
}
if (randomBoolean()) {
eqlSearchRequest.eventCategoryField(randomAlphaOfLength(10));
}
if (randomBoolean()) {
eqlSearchRequest.query(randomAlphaOfLength(10));
}
if (randomBoolean()) {
eqlSearchRequest.timestampField(randomAlphaOfLength(10));
}
if (randomBoolean()) {
eqlSearchRequest.tiebreakerField(randomAlphaOfLength(10));
}
if (randomBoolean()) {
if (randomBoolean()) {
eqlSearchRequest.filter(QueryBuilders.matchAllQuery());
} else {
eqlSearchRequest.filter(QueryBuilders.termQuery(randomAlphaOfLength(10), randomInt(100)));
}
}
return eqlSearchRequest;
}
@Override
protected org.elasticsearch.xpack.eql.action.EqlSearchRequest doParseToServerInstance(XContentParser parser) throws IOException {
return org.elasticsearch.xpack.eql.action.EqlSearchRequest.fromXContent(parser).indices("testindex");
}
@Override
protected void assertInstances(org.elasticsearch.xpack.eql.action.EqlSearchRequest serverInstance, EqlSearchRequest
clientTestInstance) {
assertThat(serverInstance.eventCategoryField(), equalTo(clientTestInstance.eventCategoryField()));
assertThat(serverInstance.timestampField(), equalTo(clientTestInstance.timestampField()));
assertThat(serverInstance.tiebreakerField(), equalTo(clientTestInstance.tiebreakerField()));
assertThat(serverInstance.filter(), equalTo(clientTestInstance.filter()));
assertThat(serverInstance.query(), equalTo(clientTestInstance.query()));
assertThat(serverInstance.indicesOptions(), equalTo(clientTestInstance.indicesOptions()));
assertThat(serverInstance.indices(), equalTo(clientTestInstance.indices()));
assertThat(serverInstance.fetchSize(), equalTo(clientTestInstance.fetchSize()));
assertThat(serverInstance.size(), equalTo(clientTestInstance.size()));
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents());
}
}

View File

@@ -1,215 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.eql;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
public class EqlSearchResponseTests extends AbstractResponseTestCase<org.elasticsearch.xpack.eql.action.EqlSearchResponse,
EqlSearchResponse> {
private static class RandomSource implements ToXContentObject {
private final String key;
private final String value;
RandomSource(Supplier<String> randomStringSupplier) {
this.key = randomStringSupplier.get();
this.value = randomStringSupplier.get();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(key, value);
builder.endObject();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(key, value);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
RandomSource other = (RandomSource) obj;
return Objects.equals(key, other.key) && Objects.equals(value, other.value);
}
public BytesReference toBytes(XContentType type) {
try (XContentBuilder builder = XContentBuilder.builder(type.xContent())) {
toXContent(builder, ToXContent.EMPTY_PARAMS);
return BytesReference.bytes(builder);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
}
static List<org.elasticsearch.xpack.eql.action.EqlSearchResponse.Event> randomEvents(XContentType xType) {
int size = randomIntBetween(1, 10);
List<org.elasticsearch.xpack.eql.action.EqlSearchResponse.Event> hits = null;
if (randomBoolean()) {
hits = new ArrayList<>();
for (int i = 0; i < size; i++) {
BytesReference bytes = new RandomSource(() -> randomAlphaOfLength(10)).toBytes(xType);
hits.add(new org.elasticsearch.xpack.eql.action.EqlSearchResponse.Event(String.valueOf(i), randomAlphaOfLength(10), bytes));
}
}
if (randomBoolean()) {
return null;
}
return hits;
}
public static org.elasticsearch.xpack.eql.action.EqlSearchResponse createRandomEventsResponse(TotalHits totalHits, XContentType xType) {
org.elasticsearch.xpack.eql.action.EqlSearchResponse.Hits hits = null;
if (randomBoolean()) {
hits = new org.elasticsearch.xpack.eql.action.EqlSearchResponse.Hits(randomEvents(xType), null, totalHits);
}
if (randomBoolean()) {
return new org.elasticsearch.xpack.eql.action.EqlSearchResponse(hits, randomIntBetween(0, 1001), randomBoolean());
} else {
return new org.elasticsearch.xpack.eql.action.EqlSearchResponse(hits, randomIntBetween(0, 1001), randomBoolean(),
randomAlphaOfLength(10), randomBoolean(), randomBoolean());
}
}
public static org.elasticsearch.xpack.eql.action.EqlSearchResponse createRandomSequencesResponse(TotalHits totalHits,
XContentType xType) {
int size = randomIntBetween(1, 10);
List<org.elasticsearch.xpack.eql.action.EqlSearchResponse.Sequence> seq = null;
if (randomBoolean()) {
List<Supplier<Object[]>> randoms = getKeysGenerators();
seq = new ArrayList<>();
for (int i = 0; i < size; i++) {
List<Object> joins = null;
if (randomBoolean()) {
joins = Arrays.asList(randomFrom(randoms).get());
}
seq.add(new org.elasticsearch.xpack.eql.action.EqlSearchResponse.Sequence(joins, randomEvents(xType)));
}
}
org.elasticsearch.xpack.eql.action.EqlSearchResponse.Hits hits = null;
if (randomBoolean()) {
hits = new org.elasticsearch.xpack.eql.action.EqlSearchResponse.Hits(null, seq, totalHits);
}
if (randomBoolean()) {
return new org.elasticsearch.xpack.eql.action.EqlSearchResponse(hits, randomIntBetween(0, 1001), randomBoolean());
} else {
return new org.elasticsearch.xpack.eql.action.EqlSearchResponse(hits, randomIntBetween(0, 1001), randomBoolean(),
randomAlphaOfLength(10), randomBoolean(), randomBoolean());
}
}
private static List<Supplier<Object[]>> getKeysGenerators() {
List<Supplier<Object[]>> randoms = new ArrayList<>();
randoms.add(() -> generateRandomStringArray(6, 11, false));
randoms.add(() -> randomArray(0, 6, Integer[]::new, ()-> randomInt()));
randoms.add(() -> randomArray(0, 6, Long[]::new, ()-> randomLong()));
randoms.add(() -> randomArray(0, 6, Boolean[]::new, ()-> randomBoolean()));
return randoms;
}
public static org.elasticsearch.xpack.eql.action.EqlSearchResponse createRandomInstance(TotalHits totalHits, XContentType xType) {
int type = between(0, 1);
switch (type) {
case 0:
return createRandomEventsResponse(totalHits, xType);
case 1:
return createRandomSequencesResponse(totalHits, xType);
default:
return null;
}
}
@Override
protected org.elasticsearch.xpack.eql.action.EqlSearchResponse createServerTestInstance(XContentType xContentType) {
TotalHits totalHits = null;
if (randomBoolean()) {
totalHits = new TotalHits(randomIntBetween(100, 1000), TotalHits.Relation.EQUAL_TO);
}
return createRandomInstance(totalHits, xContentType);
}
@Override
protected EqlSearchResponse doParseToClientInstance(XContentParser parser) throws IOException {
return EqlSearchResponse.fromXContent(parser);
}
@Override
protected void assertInstances(
org.elasticsearch.xpack.eql.action.EqlSearchResponse serverTestInstance, EqlSearchResponse clientInstance) {
assertThat(serverTestInstance.took(), is(clientInstance.took()));
assertThat(serverTestInstance.isTimeout(), is(clientInstance.isTimeout()));
assertThat(serverTestInstance.hits().totalHits(), is(clientInstance.hits().totalHits()));
if (serverTestInstance.hits().events() == null) {
assertNull(clientInstance.hits().events());
} else {
assertEvents(serverTestInstance.hits().events(), clientInstance.hits().events());
}
if (serverTestInstance.hits().sequences() == null) {
assertNull(clientInstance.hits().sequences());
} else {
assertThat(serverTestInstance.hits().sequences().size(), equalTo(clientInstance.hits().sequences().size()));
for (int i = 0; i < serverTestInstance.hits().sequences().size(); i++) {
assertThat(serverTestInstance.hits().sequences().get(i).joinKeys(),
is(clientInstance.hits().sequences().get(i).joinKeys()));
assertEvents(serverTestInstance.hits().sequences().get(i).events(), clientInstance.hits().sequences().get(i).events());
}
}
}
private void assertEvents(
List<org.elasticsearch.xpack.eql.action.EqlSearchResponse.Event> serverEvents,
List<EqlSearchResponse.Event> clientEvents
) {
assertThat(serverEvents.size(), equalTo(clientEvents.size()));
for (int j = 0; j < serverEvents.size(); j++) {
assertThat(
SourceLookup.sourceAsMap(serverEvents.get(j).source()), is(clientEvents.get(j).sourceAsMap()));
}
}
}

View File

@@ -1,90 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.eql;
import org.elasticsearch.client.AbstractResponseTestCase;
import org.elasticsearch.client.NodesResponseHeader;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.hamcrest.Matchers.is;
public class EqlStatsResponseTests extends AbstractResponseTestCase<EqlStatsResponseToXContent, EqlStatsResponse> {
private static Map<String, Object> buildRandomCountersMap(int count) {
Map<String, Object> map = new HashMap<>();
for (int i = 0; i < count; i++) {
map.put(randomAlphaOfLength(10), randomIntBetween(0, Integer.MAX_VALUE));
}
return map;
}
private static Map<String, Object> buildRandomNodeStats(int featuresNumber) {
Map<String, Object> stats = new HashMap<>();
int countersNumber = randomIntBetween(0, 10);
Map<String, Object> features = new HashMap<>();
for (int i = 0; i < featuresNumber; i++) {
features.put(randomAlphaOfLength(10), buildRandomCountersMap(countersNumber));
}
stats.put("features", features);
Map<String, Object> res = new HashMap<>();
res.put("stats", stats);
return res;
}
@Override
protected EqlStatsResponseToXContent createServerTestInstance(XContentType xContentType) {
NodesResponseHeader header = new NodesResponseHeader(randomInt(10), randomInt(10),
randomInt(10), Collections.emptyList());
String clusterName = randomAlphaOfLength(10);
int nodeCount = randomInt(10);
int featuresNumber = randomIntBetween(0, 10);
List<EqlStatsResponse.Node> nodes = new ArrayList<>(nodeCount);
for (int i = 0; i < nodeCount; i++) {
Map<String, Object> stat = buildRandomNodeStats(featuresNumber);
nodes.add(new EqlStatsResponse.Node(stat));
}
EqlStatsResponse response = new EqlStatsResponse(header, clusterName, nodes);
return new EqlStatsResponseToXContent(response);
}
@Override
protected EqlStatsResponse doParseToClientInstance(XContentParser parser) throws IOException {
return EqlStatsResponse.fromXContent(parser);
}
@Override
protected void assertInstances(EqlStatsResponseToXContent serverTestInstanceWrap, EqlStatsResponse clientInstance) {
EqlStatsResponse serverTestInstance = serverTestInstanceWrap.unwrap();
assertThat(serverTestInstance, is(clientInstance));
}
}

View File

@@ -1,81 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.eql;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.client.NodesResponseHeader;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.List;
public class EqlStatsResponseToXContent implements ToXContent {
private final EqlStatsResponse response;
public EqlStatsResponseToXContent(EqlStatsResponse response) {
this.response = response;
}
public EqlStatsResponse unwrap() {
return this.response;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
NodesResponseHeader header = response.getHeader();
if (header != null) {
builder.startObject("_nodes");
builder.field("total", header.getTotal());
builder.field("successful", header.getSuccessful());
builder.field("failed", header.getFailed());
if (header.getFailures().isEmpty() == false) {
builder.startArray("failures");
for (ElasticsearchException failure : header.getFailures()) {
builder.startObject();
failure.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
}
builder.endArray();
}
builder.endObject();
}
builder.field("cluster_name", response.getClusterName());
List<EqlStatsResponse.Node> nodes = response.getNodes();
if (nodes != null) {
builder.startArray("stats");
for (EqlStatsResponse.Node node : nodes) {
builder.startObject();
if (node.getStats() != null) {
builder.field("stats", node.getStats());
}
builder.endObject();
}
builder.endArray();
}
return builder;
}
}

View File

@@ -129,9 +129,6 @@ def projectPathsToExclude = [
':x-pack:plugin:ccr:qa',
':x-pack:plugin:core',
':x-pack:plugin:deprecation',
':x-pack:plugin:eql',
':x-pack:plugin:eql:qa',
':x-pack:plugin:eql:qa:common',
':x-pack:plugin:frozen-indices',
':x-pack:plugin:graph',
':x-pack:plugin:identity-provider',

View File

@@ -802,7 +802,6 @@ public final class ContextDocGenerator {
javaName.equals("org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalSqlScriptUtils") ||
javaName.equals("org.elasticsearch.xpack.sql.expression.literal.IntervalDayTime") ||
javaName.equals("org.elasticsearch.xpack.sql.expression.literal.IntervalYearMonth") ||
javaName.equals("org.elasticsearch.xpack.eql.expression.function.scalar.whitelist.InternalEqlScriptUtils") ||
javaName.equals("org.elasticsearch.xpack.ql.expression.function.scalar.InternalQlScriptUtils") ||
javaName.equals("org.elasticsearch.xpack.ql.expression.function.scalar.whitelist.InternalQlScriptUtils") ||
javaName.equals("org.elasticsearch.script.ScoreScript$ExplanationHolder");