SQL: add filter support in REST action (elastic/x-pack-elasticsearch#3045)

Adds the option to specify an Elasticsearch filter in addition to the SQL query by introducing a filter parameter in the REST request. When the SQL query generates an Elasticsearch query, the filter is added to it as a boolean filter; when it does not, the filter is wrapped in a constant score query. Usage:

{
   "query": "SELECT * FROM index",
   "filter" : {  "term" : { "tag" : "tech" } }
}
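A minimal sketch of how the two are combined into the final search source (the class and method names below are illustrative only, not part of the commit):

import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;

class FilterCombinationSketch {
    // sqlQuery: the Elasticsearch query derived from the SQL statement (may be null)
    // filter:   the Query DSL object passed in the "filter" parameter (assumed non-null here)
    static QueryBuilder combine(QueryBuilder sqlQuery, QueryBuilder filter) {
        if (sqlQuery != null) {
            // SQL produced a query: keep it scoring, add the filter as a non-scoring clause
            return new BoolQueryBuilder().must(sqlQuery).filter(filter);
        }
        // no query from SQL: run the filter alone, wrapped so it produces a constant score
        return new ConstantScoreQueryBuilder(filter);
    }
}

The actual wiring lives in SourceGenerator.sourceBuilder, which now takes the filter as an extra argument.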

relates elastic/x-pack-elasticsearch#2895

Original commit: elastic/x-pack-elasticsearch@9a73813c7f
Igor Motov 2017-11-21 11:40:38 -05:00 committed by GitHub
parent 3e14c30aa1
commit 2fe4da80ad
19 changed files with 335 additions and 47 deletions

View File

@ -255,7 +255,7 @@ setups['library'] = '''
{"index":{"_id": "Slaughterhouse-Five"}} {"index":{"_id": "Slaughterhouse-Five"}}
{"name": "Slaughterhouse-Five", "author": "Kurt Vonnegut", "release_date": "1969-06-01", "page_count": 275} {"name": "Slaughterhouse-Five", "author": "Kurt Vonnegut", "release_date": "1969-06-01", "page_count": 275}
{"index":{"_id": "The Hitchhiker's Guide to the Galaxy"}} {"index":{"_id": "The Hitchhiker's Guide to the Galaxy"}}
{"name": "The Hitchhiker's Guide to the Galaxy", "author": "", "release_date": "1979-10-12", "page_count": 180} {"name": "The Hitchhiker's Guide to the Galaxy", "author": "Douglas Adams", "release_date": "1979-10-12", "page_count": 180}
{"index":{"_id": "Snow Crash"}} {"index":{"_id": "Snow Crash"}}
{"name": "Snow Crash", "author": "Neal Stephenson", "release_date": "1992-06-01", "page_count": 470} {"name": "Snow Crash", "author": "Neal Stephenson", "release_date": "1992-06-01", "page_count": 470}
{"index":{"_id": "Neuromancer"}} {"index":{"_id": "Neuromancer"}}

View File

@ -80,6 +80,50 @@ scroll, receiving the last page is enough to guarantee that the
Elasticsearch state is cleared. For now, that is the only way to
clear the state.
[[sql-rest-filtering]]
You can filter the results that SQL will run on using a standard
Elasticsearch query DSL by specifying the query in the filter
parameter.
[source,js]
--------------------------------------------------
POST /_sql
{
"query": "SELECT * FROM library ORDER BY page_count DESC",
"filter": {
"range": {
"page_count": {
"gte" : 100,
"lte" : 200
}
}
},
"fetch_size": 5
}
--------------------------------------------------
// CONSOLE
// TEST[setup:library]
Which returns:
[source,js]
--------------------------------------------------
{
"columns": [
{"name": "author", "type": "keyword"},
{"name": "name", "type": "keyword"},
{"name": "page_count", "type": "short"},
{"name": "release_date", "type": "date"}
],
"size": 1,
"rows": [
["Douglas Adams", "The Hitchhiker's Guide to the Galaxy", 180, 308534400000]
]
}
--------------------------------------------------
// TESTRESPONSE
[[sql-rest-fields]]
In addition to the `query` and `cursor` fields, the request can
contain `fetch_size` and `time_zone`. `fetch_size` is a hint for how

View File

@ -147,6 +147,10 @@ public abstract class RestSqlTestCase extends ESRestTestCase {
return runSql(suffix, new StringEntity("{\"query\":\"" + sql + "\"}", ContentType.APPLICATION_JSON));
}
private Map<String, Object> runSql(String sql, String filter, String suffix) throws IOException {
return runSql(suffix, new StringEntity("{\"query\":\"" + sql + "\", \"filter\":" + filter + "}", ContentType.APPLICATION_JSON));
}
private Map<String, Object> runSql(HttpEntity sql) throws IOException {
return runSql("", sql);
}
@ -183,4 +187,60 @@ public abstract class RestSqlTestCase extends ESRestTestCase {
assertEquals(emptyList(), source.get("excludes"));
assertEquals(singletonList("test"), source.get("includes"));
}
public void testBasicQueryWithFilter() throws IOException {
StringBuilder bulk = new StringBuilder();
bulk.append("{\"index\":{\"_id\":\"1\"}}\n");
bulk.append("{\"test\":\"foo\"}\n");
bulk.append("{\"index\":{\"_id\":\"2\"}}\n");
bulk.append("{\"test\":\"bar\"}\n");
client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"),
new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
Map<String, Object> expected = new HashMap<>();
expected.put("columns", singletonList(columnInfo("test", "text")));
expected.put("rows", singletonList(singletonList("foo")));
expected.put("size", 1);
assertResponse(expected, runSql("SELECT * FROM test", "{\"match\": {\"test\": \"foo\"}}", ""));
}
public void testBasicTranslateQueryWithFilter() throws IOException {
StringBuilder bulk = new StringBuilder();
bulk.append("{\"index\":{\"_id\":\"1\"}}\n");
bulk.append("{\"test\":\"foo\"}\n");
bulk.append("{\"index\":{\"_id\":\"2\"}}\n");
bulk.append("{\"test\":\"bar\"}\n");
client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"),
new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
Map<String, Object> response = runSql("SELECT * FROM test", "{\"match\": {\"test\": \"foo\"}}", "/translate/");
assertEquals(response.get("size"), 1000);
@SuppressWarnings("unchecked")
Map<String, Object> source = (Map<String, Object>) response.get("_source");
assertNotNull(source);
assertEquals(emptyList(), source.get("excludes"));
assertEquals(singletonList("test"), source.get("includes"));
@SuppressWarnings("unchecked")
Map<String, Object> query = (Map<String, Object>) response.get("query");
assertNotNull(query);
@SuppressWarnings("unchecked")
Map<String, Object> constantScore = (Map<String, Object>) query.get("constant_score");
assertNotNull(constantScore);
@SuppressWarnings("unchecked")
Map<String, Object> filter = (Map<String, Object>) constantScore.get("filter");
assertNotNull(filter);
@SuppressWarnings("unchecked")
Map<String, Object> match = (Map<String, Object>) filter.get("match");
assertNotNull(match);
@SuppressWarnings("unchecked")
Map<String, Object> matchQuery = (Map<String, Object>) match.get("test");
assertNotNull(matchQuery);
assertEquals("foo", matchQuery.get("query"));
}
}

View File

@ -61,7 +61,7 @@ public class PlanExecutor {
PhysicalPlan executable = newSession(settings).executable(sql);
if (executable instanceof EsQueryExec) {
EsQueryExec e = (EsQueryExec) executable;
-return SourceGenerator.sourceBuilder(e.queryContainer(), settings.pageSize());
+return SourceGenerator.sourceBuilder(e.queryContainer(), settings.filter(), settings.pageSize());
}
else {
throw new PlanningException("Cannot generate a query DSL for %s", sql);

View File

@ -11,9 +11,11 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
@ -24,7 +26,6 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.execution.ExecutionException;
import org.elasticsearch.xpack.sql.execution.search.extractor.ComputingHitExtractor;
-import org.elasticsearch.xpack.sql.execution.search.extractor.ConstantExtractor;
import org.elasticsearch.xpack.sql.execution.search.extractor.DocValueExtractor;
import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor;
import org.elasticsearch.xpack.sql.execution.search.extractor.InnerHitExtractor;
@ -45,7 +46,6 @@ import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef;
import org.elasticsearch.xpack.sql.querydsl.container.SearchHitFieldRef;
import org.elasticsearch.xpack.sql.querydsl.container.TotalCountRef;
import org.elasticsearch.xpack.sql.session.Configuration;
-import org.elasticsearch.xpack.sql.session.RowSet;
import org.elasticsearch.xpack.sql.session.Rows;
import org.elasticsearch.xpack.sql.session.SchemaRowSet;
import org.elasticsearch.xpack.sql.type.Schema;
@ -62,21 +62,24 @@ public class Scroller {
private final TimeValue keepAlive, timeout;
private final int size;
private final Client client;
@Nullable
private final QueryBuilder filter;
public Scroller(Client client, Configuration cfg) {
-this(client, cfg.requestTimeout(), cfg.pageTimeout(), cfg.pageSize());
+this(client, cfg.requestTimeout(), cfg.pageTimeout(), cfg.filter(), cfg.pageSize());
}
-public Scroller(Client client, TimeValue keepAlive, TimeValue timeout, int size) {
+public Scroller(Client client, TimeValue keepAlive, TimeValue timeout, QueryBuilder filter, int size) {
this.client = client;
this.keepAlive = keepAlive;
this.timeout = timeout;
this.filter = filter;
this.size = size;
}
public void scroll(Schema schema, QueryContainer query, String index, ActionListener<SchemaRowSet> listener) {
// prepare the request
-SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(query, size);
+SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(query, filter, size);
if (log.isTraceEnabled()) {
log.trace("About to execute query {} on {}", StringUtils.toString(sourceBuilder), index);

View File

@ -5,6 +5,8 @@
*/
package org.elasticsearch.xpack.sql.execution.search;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregationBuilder;
@ -49,12 +51,20 @@ public abstract class SourceGenerator {
private static final List<String> NO_STORED_FIELD = singletonList(StoredFieldsContext._NONE_);
-public static SearchSourceBuilder sourceBuilder(QueryContainer container, Integer size) {
+public static SearchSourceBuilder sourceBuilder(QueryContainer container, QueryBuilder filter, Integer size) {
SearchSourceBuilder source = new SearchSourceBuilder();
// add the source
if (container.query() != null) {
if (filter != null) {
source.query(new BoolQueryBuilder().must(container.query().asBuilder()).filter(filter));
} else {
source.query(container.query().asBuilder());
}
} else {
if (filter != null) {
source.query(new ConstantScoreQueryBuilder(filter));
}
}
// translate fields to source-fields or script fields
Set<String> sourceFields = new LinkedHashSet<>();

View File

@ -79,7 +79,7 @@ public class RestSqlCliAction extends AbstractSqlProtocolRestAction {
private Consumer<RestChannel> queryInit(Client client, QueryInitRequest request) {
// TODO time zone support for CLI
-SqlRequest sqlRequest = new SqlRequest(request.query, DateTimeZone.forTimeZone(request.timeZone), request.fetchSize,
+SqlRequest sqlRequest = new SqlRequest(request.query, null, DateTimeZone.forTimeZone(request.timeZone), request.fetchSize,
TimeValue.timeValueMillis(request.timeout.requestTimeout),
TimeValue.timeValueMillis(request.timeout.pageTimeout),
Cursor.EMPTY);
@ -100,7 +100,7 @@ public class RestSqlCliAction extends AbstractSqlProtocolRestAction {
} catch (IOException e) {
throw new IllegalArgumentException("error reading the cursor");
}
-SqlRequest sqlRequest = new SqlRequest("", SqlRequest.DEFAULT_TIME_ZONE, 0,
+SqlRequest sqlRequest = new SqlRequest("", null, SqlRequest.DEFAULT_TIME_ZONE, 0,
TimeValue.timeValueMillis(request.timeout.requestTimeout),
TimeValue.timeValueMillis(request.timeout.pageTimeout),
cursor);

View File

@ -140,7 +140,8 @@ public class RestSqlJdbcAction extends AbstractSqlProtocolRestAction {
}
private Consumer<RestChannel> queryInit(Client client, QueryInitRequest request) {
-SqlRequest sqlRequest = new SqlRequest(request.query, DateTimeZone.forTimeZone(request.timeZone), request.fetchSize,
+SqlRequest sqlRequest = new SqlRequest(request.query, null, DateTimeZone.forTimeZone(request.timeZone), request.fetchSize,
TimeValue.timeValueMillis(request.timeout.requestTimeout),
TimeValue.timeValueMillis(request.timeout.pageTimeout),
Cursor.EMPTY);
@ -167,7 +168,7 @@ public class RestSqlJdbcAction extends AbstractSqlProtocolRestAction {
throw new IllegalArgumentException("error reading the cursor");
}
// NB: the timezone and page size are locked already by the query so pass in defaults (as they are not read anyway)
-SqlRequest sqlRequest = new SqlRequest(EMPTY, SqlRequest.DEFAULT_TIME_ZONE, 0,
+SqlRequest sqlRequest = new SqlRequest(EMPTY, null, SqlRequest.DEFAULT_TIME_ZONE, 0,
TimeValue.timeValueMillis(request.timeout.requestTimeout),
TimeValue.timeValueMillis(request.timeout.pageTimeout),
cursor);

View File

@ -19,13 +19,13 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
@ -38,6 +38,9 @@ import org.elasticsearch.xpack.sql.plugin.sql.action.AbstractSqlRequest;
import org.elasticsearch.xpack.sql.session.Configuration;
import org.joda.time.DateTimeZone;
import java.io.IOException;
import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
@ -46,9 +49,6 @@ import static org.elasticsearch.xpack.sql.plugin.sql.action.AbstractSqlRequest.D
import static org.elasticsearch.xpack.sql.plugin.sql.action.AbstractSqlRequest.DEFAULT_REQUEST_TIMEOUT;
import static org.elasticsearch.xpack.sql.plugin.sql.action.AbstractSqlRequest.DEFAULT_TIME_ZONE;
-import java.io.IOException;
-import java.util.Objects;
public class SqlTranslateAction
extends Action<SqlTranslateAction.Request, SqlTranslateAction.Response, SqlTranslateAction.RequestBuilder> {
@ -74,8 +74,9 @@ public class SqlTranslateAction
public Request() {}
-public Request(String query, DateTimeZone timeZone, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout) {
-super(query, timeZone, fetchSize, requestTimeout, pageTimeout);
+public Request(String query, QueryBuilder filter, DateTimeZone timeZone, int fetchSize, TimeValue requestTimeout,
+TimeValue pageTimeout) {
+super(query, filter, timeZone, fetchSize, requestTimeout, pageTimeout);
}
@Override
@ -89,18 +90,18 @@ public class SqlTranslateAction
@Override
public String getDescription() {
-return "SQL Translate [" + query() + "]";
+return "SQL Translate [" + query() + "][" + filter() + "]";
}
}
public static class RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder> {
public RequestBuilder(ElasticsearchClient client, SqlTranslateAction action) {
-this(client, action, null, DEFAULT_TIME_ZONE, DEFAULT_FETCH_SIZE, DEFAULT_REQUEST_TIMEOUT, DEFAULT_PAGE_TIMEOUT);
+this(client, action, null, null, DEFAULT_TIME_ZONE, DEFAULT_FETCH_SIZE, DEFAULT_REQUEST_TIMEOUT, DEFAULT_PAGE_TIMEOUT);
}
-public RequestBuilder(ElasticsearchClient client, SqlTranslateAction action, String query, DateTimeZone timeZone,
-int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout) {
-super(client, action, new Request(query, timeZone, fetchSize, requestTimeout, pageTimeout));
+public RequestBuilder(ElasticsearchClient client, SqlTranslateAction action, String query, QueryBuilder filter,
+DateTimeZone timeZone, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout) {
+super(client, action, new Request(query, filter, timeZone, fetchSize, requestTimeout, pageTimeout));
}
public RequestBuilder query(String query) {
@ -186,7 +187,7 @@ public class SqlTranslateAction
String query = request.query();
Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(),
-request.requestTimeout(), request.pageTimeout());
+request.requestTimeout(), request.pageTimeout(), request.filter());
listener.onResponse(new Response(planExecutor.searchSource(query, cfg)));
}

View File

@ -10,10 +10,13 @@ package org.elasticsearch.xpack.sql.plugin.sql.action;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.CompositeIndicesRequest;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.xpack.sql.protocol.shared.AbstractQueryInitRequest;
import org.elasticsearch.xpack.sql.protocol.shared.TimeoutInfo;
import org.joda.time.DateTimeZone;
@ -34,12 +37,14 @@ public abstract class AbstractSqlRequest extends ActionRequest implements Compos
private int fetchSize = DEFAULT_FETCH_SIZE;
private TimeValue requestTimeout = DEFAULT_REQUEST_TIMEOUT;
private TimeValue pageTimeout = DEFAULT_PAGE_TIMEOUT;
@Nullable
private QueryBuilder filter = null;
public AbstractSqlRequest() {
super();
}
-public AbstractSqlRequest(String query, DateTimeZone timeZone, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout) {
+public AbstractSqlRequest(String query, QueryBuilder filter, DateTimeZone timeZone, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout) {
this.query = query;
this.timeZone = timeZone;
this.fetchSize = fetchSize;
@ -59,6 +64,9 @@ public abstract class AbstractSqlRequest extends ActionRequest implements Compos
parser.declareString(
(request, timeout) -> request.pageTimeout(TimeValue.parseTimeValue(timeout, DEFAULT_PAGE_TIMEOUT, "page_timeout")),
new ParseField("page_timeout"));
parser.declareObject(AbstractSqlRequest::filter,
(p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), new ParseField("filter"));
return parser;
}
@ -123,6 +131,22 @@ public abstract class AbstractSqlRequest extends ActionRequest implements Compos
return this;
}
/**
* An optional Query DSL defined query that can added as a filter on the top of the SQL query
*/
public AbstractSqlRequest filter(QueryBuilder filter) {
this.filter = filter;
return this;
}
/**
* An optional Query DSL defined query that can added as a filter on the top of the SQL query
*/
public QueryBuilder filter() {
return filter;
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
@ -131,6 +155,7 @@ public abstract class AbstractSqlRequest extends ActionRequest implements Compos
fetchSize = in.readVInt();
requestTimeout = new TimeValue(in);
pageTimeout = new TimeValue(in);
filter = in.readOptionalNamedWriteable(QueryBuilder.class);
}
@Override
@ -141,11 +166,12 @@ public abstract class AbstractSqlRequest extends ActionRequest implements Compos
out.writeVInt(fetchSize);
requestTimeout.writeTo(out);
pageTimeout.writeTo(out);
out.writeOptionalNamedWriteable(filter);
}
@Override
public int hashCode() {
-return Objects.hash(query, timeZone, fetchSize, requestTimeout, pageTimeout);
+return Objects.hash(query, timeZone, fetchSize, requestTimeout, pageTimeout, filter);
}
@Override
@ -163,6 +189,7 @@ public abstract class AbstractSqlRequest extends ActionRequest implements Compos
&& Objects.equals(timeZone, other.timeZone)
&& fetchSize == other.fetchSize
&& Objects.equals(requestTimeout, other.requestTimeout)
-&& Objects.equals(pageTimeout, other.pageTimeout);
+&& Objects.equals(pageTimeout, other.pageTimeout)
&& Objects.equals(filter, other.filter);
}
}

View File

@ -12,6 +12,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.xpack.sql.session.Cursor;
import org.joda.time.DateTimeZone;
@ -25,17 +27,21 @@ public class SqlRequest extends AbstractSqlRequest {
public static final ObjectParser<SqlRequest, Void> PARSER = objectParser(SqlRequest::new);
public static final ParseField CURSOR = new ParseField("cursor");
public static final ParseField FILTER = new ParseField("filter");
static {
PARSER.declareString((request, nextPage) -> request.cursor(Cursor.decodeFromString(nextPage)), CURSOR);
PARSER.declareObject(SqlRequest::filter,
(p, c) -> AbstractQueryBuilder.parseInnerQueryBuilder(p), FILTER);
}
private Cursor cursor = Cursor.EMPTY;
public SqlRequest() {}
-public SqlRequest(String query, DateTimeZone timeZone, int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, Cursor cursor) {
-super(query, timeZone, fetchSize, requestTimeout, pageTimeout);
+public SqlRequest(String query, QueryBuilder filter, DateTimeZone timeZone, int fetchSize, TimeValue requestTimeout,
+TimeValue pageTimeout, Cursor cursor) {
+super(query, filter, timeZone, fetchSize, requestTimeout, pageTimeout);
this.cursor = cursor;
}
@ -93,6 +99,6 @@ public class SqlRequest extends AbstractSqlRequest {
@Override
public String getDescription() {
-return "SQL [" + query() + "]";
+return "SQL [" + query() + "][" + filter() + "]";
}
}

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.plugin.sql.action;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.xpack.sql.session.Cursor;
import org.joda.time.DateTimeZone;
@ -19,12 +20,12 @@ import static org.elasticsearch.xpack.sql.plugin.sql.action.AbstractSqlRequest.D
public class SqlRequestBuilder extends ActionRequestBuilder<SqlRequest, SqlResponse, SqlRequestBuilder> {
public SqlRequestBuilder(ElasticsearchClient client, SqlAction action) {
-this(client, action, "", DEFAULT_TIME_ZONE, DEFAULT_FETCH_SIZE, DEFAULT_REQUEST_TIMEOUT, DEFAULT_PAGE_TIMEOUT, Cursor.EMPTY);
+this(client, action, "", null, DEFAULT_TIME_ZONE, DEFAULT_FETCH_SIZE, DEFAULT_REQUEST_TIMEOUT, DEFAULT_PAGE_TIMEOUT, Cursor.EMPTY);
}
-public SqlRequestBuilder(ElasticsearchClient client, SqlAction action, String query, DateTimeZone timeZone, int fetchSize,
-TimeValue requestTimeout, TimeValue pageTimeout, Cursor nextPageInfo) {
-super(client, action, new SqlRequest(query, timeZone, fetchSize, requestTimeout, pageTimeout, nextPageInfo));
+public SqlRequestBuilder(ElasticsearchClient client, SqlAction action, String query, QueryBuilder filter, DateTimeZone timeZone,
+int fetchSize, TimeValue requestTimeout, TimeValue pageTimeout, Cursor nextPageInfo) {
+super(client, action, new SqlRequest(query, filter, timeZone, fetchSize, requestTimeout, pageTimeout, nextPageInfo));
}
public SqlRequestBuilder query(String query) {
@ -32,6 +33,11 @@ public class SqlRequestBuilder extends ActionRequestBuilder<SqlRequest, SqlRespo
return this;
}
public SqlRequestBuilder filter(QueryBuilder filter) {
request.filter(filter);
return this;
}
public SqlRequestBuilder nextPageKey(Cursor nextPageInfo) {
request.cursor(nextPageInfo);
return this;

View File

@ -55,7 +55,8 @@ public class TransportSqlAction extends HandledTransportAction<SqlRequest, SqlRe
public static void operation(PlanExecutor planExecutor, SqlRequest request, ActionListener<SqlResponse> listener) {
// The configuration is always created however when dealing with the next page, only the timeouts are relevant
// the rest having default values (since the query is already created)
-Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(), request.requestTimeout(), request.pageTimeout());
+Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(), request.requestTimeout(), request.pageTimeout(),
+request.filter());
if (request.cursor() == Cursor.EMPTY) {
planExecutor.sql(cfg, request.query(),

View File

@ -360,7 +360,7 @@ public class QueryContainer {
public String toString() {
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
builder.humanReadable(true).prettyPrint();
-SourceGenerator.sourceBuilder(this, null).toXContent(builder, ToXContent.EMPTY_PARAMS);
+SourceGenerator.sourceBuilder(this, null, null).toXContent(builder, ToXContent.EMPTY_PARAMS);
return builder.string();
} catch (IOException e) {
throw new RuntimeException("error rendering", e);

View File

@ -6,8 +6,10 @@
package org.elasticsearch.xpack.sql.session;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.xpack.sql.plugin.sql.action.AbstractSqlRequest;
import org.elasticsearch.xpack.sql.protocol.shared.AbstractQueryInitRequest;
import org.elasticsearch.xpack.sql.protocol.shared.Nullable;
import org.joda.time.DateTimeZone;
// Typed object holding properties for a given
@ -15,18 +17,22 @@ public class Configuration {
public static final Configuration DEFAULT = new Configuration(DateTimeZone.UTC,
AbstractQueryInitRequest.DEFAULT_FETCH_SIZE,
AbstractSqlRequest.DEFAULT_REQUEST_TIMEOUT,
-AbstractSqlRequest.DEFAULT_PAGE_TIMEOUT);
+AbstractSqlRequest.DEFAULT_PAGE_TIMEOUT,
+null);
private DateTimeZone timeZone;
private int pageSize;
private TimeValue requestTimeout;
private TimeValue pageTimeout;
@Nullable
private QueryBuilder filter;
-public Configuration(DateTimeZone tz, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout) {
+public Configuration(DateTimeZone tz, int pageSize, TimeValue requestTimeout, TimeValue pageTimeout, QueryBuilder filter) {
this.timeZone = tz;
this.pageSize = pageSize;
this.requestTimeout = requestTimeout;
this.pageTimeout = pageTimeout;
this.filter = filter;
}
public DateTimeZone timeZone() {
@ -44,4 +50,8 @@ public class Configuration {
public TimeValue pageTimeout() {
return pageTimeout;
}
public QueryBuilder filter() {
return filter;
}
}

View File

@ -0,0 +1,41 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.RangeQueryBuilder;
import java.util.Random;
public final class SqlTestUtils {
private SqlTestUtils() {
}
/**
* Returns a random QueryBuilder or null
*/
public static QueryBuilder randomFilterOrNull(Random random) {
final QueryBuilder randomFilter;
if (random.nextBoolean()) {
randomFilter = randomFilter(random);
} else {
randomFilter = null;
}
return randomFilter;
}
/**
* Returns a random QueryBuilder
*/
public static QueryBuilder randomFilter(Random random) {
return new RangeQueryBuilder(RandomStrings.randomAsciiLettersOfLength(random, 10))
.gt(random.nextInt());
}
}

View File

@ -0,0 +1,47 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.execution.search;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer;
import org.elasticsearch.xpack.sql.querydsl.query.MatchQuery;
import org.elasticsearch.xpack.sql.tree.Location;
public class SourceGeneratorTests extends ESTestCase {
public void testNoQueryNoFilter() {
QueryContainer container = new QueryContainer();
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
assertEquals(sourceBuilder.query(), null);
}
public void testQueryNoFilter() {
QueryContainer container = new QueryContainer().with(new MatchQuery(Location.EMPTY, "foo", "bar"));
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
assertEquals(sourceBuilder.query(), new MatchQueryBuilder("foo", "bar").operator(Operator.AND));
}
public void testNoQueryFilter() {
QueryContainer container = new QueryContainer();
QueryBuilder filter = new MatchQueryBuilder("bar", "baz");
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, filter, randomIntBetween(1, 10));
assertEquals(sourceBuilder.query(), new ConstantScoreQueryBuilder(new MatchQueryBuilder("bar", "baz")));
}
public void testQueryFilter() {
QueryContainer container = new QueryContainer().with(new MatchQuery(Location.EMPTY, "foo", "bar"));
QueryBuilder filter = new MatchQueryBuilder("bar", "baz");
SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, filter, randomIntBetween(1, 10));
assertEquals(sourceBuilder.query(), new BoolQueryBuilder().must( new MatchQueryBuilder("foo", "bar").operator(Operator.AND))
.filter(new MatchQueryBuilder("bar", "baz")));
}
}

View File

@ -5,17 +5,25 @@
*/
package org.elasticsearch.xpack.sql.plugin;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractStreamableTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils.MutateFunction;
import java.util.Collections;
import static org.elasticsearch.xpack.sql.SqlTestUtils.randomFilter;
import static org.elasticsearch.xpack.sql.SqlTestUtils.randomFilterOrNull;
public class SqlTranslateRequestTests extends AbstractStreamableTestCase<SqlTranslateAction.Request> {
@Override
protected SqlTranslateAction.Request createTestInstance() {
-return new SqlTranslateAction.Request(randomAlphaOfLength(10), randomDateTimeZone(), between(1, Integer.MAX_VALUE),
-randomTV(), randomTV());
+return new SqlTranslateAction.Request(randomAlphaOfLength(10), randomFilterOrNull(random()), randomDateTimeZone(),
+between(1, Integer.MAX_VALUE), randomTV(), randomTV());
}
private TimeValue randomTV() {
@ -40,6 +48,15 @@ public class SqlTranslateRequestTests extends AbstractStreamableTestCase<SqlTran
request -> (SqlTranslateAction.Request) getCopyFunction().copy(request)
.requestTimeout(randomValueOtherThan(request.requestTimeout(), () -> randomTV())),
request -> (SqlTranslateAction.Request) getCopyFunction().copy(request)
-.pageTimeout(randomValueOtherThan(request.pageTimeout(), () -> randomTV())));
+.pageTimeout(randomValueOtherThan(request.pageTimeout(), () -> randomTV())),
request -> (SqlTranslateAction.Request) getCopyFunction().copy(request).filter(randomValueOtherThan(request.filter(),
() -> request.filter() == null ? randomFilter(random()) : randomFilterOrNull(random()))));
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
// We need this for QueryBuilder serialization
SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
return new NamedWriteableRegistry(searchModule.getNamedWriteables());
}
}

View File

@ -6,24 +6,36 @@
package org.elasticsearch.xpack.sql.plugin.sql.action;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractStreamableTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils.MutateFunction;
import org.elasticsearch.xpack.sql.plugin.SqlPlugin;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static org.elasticsearch.xpack.sql.SqlTestUtils.randomFilter;
import static org.elasticsearch.xpack.sql.SqlTestUtils.randomFilterOrNull;
import static org.elasticsearch.xpack.sql.plugin.sql.action.SqlResponseTests.randomCursor;
public class SqlRequestTests extends AbstractStreamableTestCase<SqlRequest> {
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
-return new NamedWriteableRegistry(SqlPlugin.getNamedWriteables());
+SearchModule searchModule = new SearchModule(Settings.EMPTY, false, Collections.emptyList());
List<NamedWriteableRegistry.Entry> namedWriteables = new ArrayList<>();
namedWriteables.addAll(searchModule.getNamedWriteables());
namedWriteables.addAll(SqlPlugin.getNamedWriteables());
return new NamedWriteableRegistry(namedWriteables);
}
@Override
protected SqlRequest createTestInstance() {
-return new SqlRequest(randomAlphaOfLength(10), randomDateTimeZone(), between(1, Integer.MAX_VALUE),
-randomTV(), randomTV(), randomCursor());
+return new SqlRequest(randomAlphaOfLength(10), randomFilterOrNull(random()), randomDateTimeZone(),
+between(1, Integer.MAX_VALUE), randomTV(), randomTV(), randomCursor());
}
private TimeValue randomTV() {
@ -50,6 +62,8 @@ public class SqlRequestTests extends AbstractStreamableTestCase<SqlRequest> {
request -> (SqlRequest) getCopyFunction().copy(request)
.requestTimeout(randomValueOtherThan(request.requestTimeout(), () -> randomTV())),
request -> (SqlRequest) getCopyFunction().copy(request)
-.pageTimeout(randomValueOtherThan(request.pageTimeout(), () -> randomTV())));
+.pageTimeout(randomValueOtherThan(request.pageTimeout(), () -> randomTV())),
request -> (SqlRequest) getCopyFunction().copy(request).filter(randomValueOtherThan(request.filter(),
() -> request.filter() == null ? randomFilter(random()) : randomFilterOrNull(random()))));
}
}