[PERCOLATOR] Allowing range queries with now ranges inside percolator queries.
Previously, now-based ranges were forbidden because the percolator query itself could get cached, after which percolator queries with now ranges that should no longer match would incorrectly continue to match. By disabling caching when the `percolator` field is being used, the percolator can now correctly support range queries with now-based ranges. I think this is the right tradeoff: the percolator query is unlikely to be the same between search requests anyway, and forbidding range queries with now ranges really prevented people from using the percolator for their use cases. Also fixed an issue in the percolator field mapper: it was unable to find forbidden queries nested inside `dismax` queries. Closes #23859
Parent: ac87d40bd5
Commit: 3d9671a668
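As an illustration of what this change enables (not part of the diff below): a percolator document whose query uses a now-based range can now be indexed. This is a minimal sketch; the document field name "query", the date field "timestamp", and the bounds are hypothetical, and the builder calls mirror the test code in this commit.

import org.elasticsearch.common.xcontent.XContentBuilder;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;

public class NowRangePercolatorDocSketch {
    public static void main(String[] args) throws Exception {
        // Source for a percolator document whose query uses a now-based range.
        // Before this change, indexing this into a `percolator` field failed at
        // mapping time; with caching disabled for percolation it is now accepted.
        XContentBuilder doc = jsonBuilder().startObject()
                .field("query", rangeQuery("timestamp").from("now-1h").to("now"))
                .endObject();
        System.out.println(doc.string());
    }
}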
@@ -76,9 +76,6 @@ fail.
 Because the `percolate` query is processing one document at a time, it doesn't support queries and filters that run
 against child documents such as `has_child` and `has_parent`.
 
-The percolator doesn't accepts percolator queries containing `range` queries with ranges that are based on current
-time (using `now`).
-
 There are a number of queries that fetch data via a get call during query parsing. For example the `terms` query when
 using terms lookup, `template` query when using indexed scripts and `geo_shape` when using pre-indexed shapes. When these
 queries are indexed by the `percolator` field type then the get call is executed once. So each time the `percolator`

@@ -192,25 +192,19 @@ final class PercolateQuery extends Query implements Accountable {
         return queryStore;
     }
 
+    // Comparing identity here to avoid being cached
+    // Note that in theory if the same instance gets used multiple times it could still get cached,
+    // however since we create a new query instance each time we this query this shouldn't happen and thus
+    // this risk neglectable.
     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        if (sameClassAs(o) == false) return false;
-
-        PercolateQuery that = (PercolateQuery) o;
-
-        if (!documentType.equals(that.documentType)) return false;
-        return documentSource.equals(that.documentSource);
-
+        return this == o;
    }
 
+    // Computing hashcode based on identity to avoid caching.
     @Override
     public int hashCode() {
-        int result = classHash();
-        result = 31 * result + documentType.hashCode();
-        result = 31 * result + documentSource.hashCode();
-        return result;
+        return System.identityHashCode(this);
     }
 
     @Override

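For context on the identity-based equals/hashCode above: Lucene's query cache looks up entries by query equality, so a query that only ever equals itself can never be served from an entry created for an earlier, logically identical instance. The following is a toy, self-contained sketch of that effect using a plain HashMap; it is not Elasticsearch or Lucene code, and the class name is hypothetical.

import java.util.HashMap;
import java.util.Map;

final class IdentityKeyedQuery {
    final String source;

    IdentityKeyedQuery(String source) {
        this.source = source;
    }

    @Override
    public boolean equals(Object o) {
        return this == o;          // identity, not field equality
    }

    @Override
    public int hashCode() {
        return System.identityHashCode(this);
    }

    public static void main(String[] args) {
        Map<IdentityKeyedQuery, String> cache = new HashMap<>();
        cache.put(new IdentityKeyedQuery("{\"range\":{\"field\":{\"lte\":\"now\"}}}"), "cached result");
        // A new instance with identical content never reuses the stale entry.
        System.out.println(cache.get(new IdentityKeyedQuery("{\"range\":{\"field\":{\"lte\":\"now\"}}}"))); // prints null
    }
}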
@@ -28,13 +28,13 @@ import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.elasticsearch.common.ParsingException;

@@ -56,13 +56,13 @@ import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.BoostingQueryBuilder;
 import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
+import org.elasticsearch.index.query.DisMaxQueryBuilder;
 import org.elasticsearch.index.query.HasChildQueryBuilder;
 import org.elasticsearch.index.query.HasParentQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.QueryShardException;
-import org.elasticsearch.index.query.RangeQueryBuilder;
 import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
 
 import java.io.IOException;

@@ -374,28 +374,11 @@ public class PercolatorFieldMapper extends FieldMapper {
 
     /**
      * Fails if a percolator contains an unsupported query. The following queries are not supported:
-     * 1) a range query with a date range based on current time
-     * 2) a has_child query
-     * 3) a has_parent query
+     * 1) a has_child query
+     * 2) a has_parent query
      */
     static void verifyQuery(QueryBuilder queryBuilder) {
-        if (queryBuilder instanceof RangeQueryBuilder) {
-            RangeQueryBuilder rangeQueryBuilder = (RangeQueryBuilder) queryBuilder;
-            if (rangeQueryBuilder.from() instanceof String) {
-                String from = (String) rangeQueryBuilder.from();
-                if (from.contains("now")) {
-                    throw new IllegalArgumentException("percolator queries containing time range queries based on the " +
-                        "current time is unsupported");
-                }
-            }
-            if (rangeQueryBuilder.to() instanceof String) {
-                String to = (String) rangeQueryBuilder.to();
-                if (to.contains("now")) {
-                    throw new IllegalArgumentException("percolator queries containing time range queries based on the " +
-                        "current time is unsupported");
-                }
-            }
-        } else if (queryBuilder instanceof HasChildQueryBuilder) {
+        if (queryBuilder instanceof HasChildQueryBuilder) {
             throw new IllegalArgumentException("the [has_child] query is unsupported inside a percolator query");
         } else if (queryBuilder instanceof HasParentQueryBuilder) {
             throw new IllegalArgumentException("the [has_parent] query is unsupported inside a percolator query");

@@ -416,6 +399,11 @@ public class PercolatorFieldMapper extends FieldMapper {
         } else if (queryBuilder instanceof BoostingQueryBuilder) {
             verifyQuery(((BoostingQueryBuilder) queryBuilder).negativeQuery());
             verifyQuery(((BoostingQueryBuilder) queryBuilder).positiveQuery());
+        } else if (queryBuilder instanceof DisMaxQueryBuilder) {
+            DisMaxQueryBuilder disMaxQueryBuilder = (DisMaxQueryBuilder) queryBuilder;
+            for (QueryBuilder innerQueryBuilder : disMaxQueryBuilder.innerQueries()) {
+                verifyQuery(innerQueryBuilder);
+            }
         }
     }
 

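The dis_max branch above is the fix for forbidden queries hidden inside `dismax` queries: verification now recurses into each inner query. Below is a toy, self-contained model of that traversal pattern, using hypothetical stand-in types rather than the real Elasticsearch query builders:

import java.util.Arrays;
import java.util.List;

final class VerifyRecursionSketch {
    interface Query {}

    static final class HasChildLike implements Query {}

    static final class DisMaxLike implements Query {
        final List<Query> inner;
        DisMaxLike(List<Query> inner) { this.inner = inner; }
    }

    // Mirrors the shape of verifyQuery: reject forbidden leaves, and recurse into
    // the inner queries of compound queries such as dis_max.
    static void verify(Query query) {
        if (query instanceof HasChildLike) {
            throw new IllegalArgumentException("the [has_child] query is unsupported inside a percolator query");
        } else if (query instanceof DisMaxLike) {
            for (Query inner : ((DisMaxLike) query).inner) {
                verify(inner);
            }
        }
    }

    public static void main(String[] args) {
        try {
            verify(new DisMaxLike(Arrays.asList(new HasChildLike())));
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}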
@@ -281,4 +281,9 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
     protected boolean isCachable(PercolateQueryBuilder queryBuilder) {
         return false;
     }
+
+    @Override
+    protected boolean builderGeneratesCacheableQueries() {
+        return false;
+    }
 }

@@ -27,12 +27,12 @@ import org.apache.lucene.index.PrefixCodedTerms;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.memory.MemoryIndex;
 import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermRangeQuery;
-import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.Version;

@@ -57,6 +57,7 @@ import org.elasticsearch.index.mapper.SourceToParse;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.BoostingQueryBuilder;
 import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
+import org.elasticsearch.index.query.DisMaxQueryBuilder;
 import org.elasticsearch.index.query.HasChildQueryBuilder;
 import org.elasticsearch.index.query.HasParentQueryBuilder;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;

@@ -445,113 +446,27 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
         assertThat(e.getCause().getMessage(), equalTo("a document can only contain one percolator query"));
     }
 
-    public void testRangeQueryWithNowRangeIsForbidden() throws Exception {
-        addQueryMapping();
-        MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
-            mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                jsonBuilder().startObject()
-                    .field(fieldName, rangeQuery("date_field").from("2016-01-01||/D").to("now"))
-                    .endObject().bytes(),
-                XContentType.JSON));
-            }
-        );
-        assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
-        e = expectThrows(MapperParsingException.class, () -> {
-            mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                jsonBuilder().startObject()
-                    .field(fieldName, rangeQuery("date_field").from("2016-01-01||/D").to("now/D"))
-                    .endObject().bytes(),
-                XContentType.JSON));
-            }
-        );
-        assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
-        e = expectThrows(MapperParsingException.class, () -> {
-            mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                jsonBuilder().startObject()
-                    .field(fieldName, rangeQuery("date_field").from("now-1d").to("now"))
-                    .endObject().bytes(),
-                XContentType.JSON));
-            }
-        );
-        assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
-        e = expectThrows(MapperParsingException.class, () -> {
-            mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                jsonBuilder().startObject()
-                    .field(fieldName, rangeQuery("date_field").from("now"))
-                    .endObject().bytes(),
-                XContentType.JSON));
-            }
-        );
-        assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
-        e = expectThrows(MapperParsingException.class, () -> {
-            mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                jsonBuilder().startObject()
-                    .field(fieldName, rangeQuery("date_field").to("now"))
-                    .endObject().bytes(),
-                XContentType.JSON));
-            }
-        );
-        assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
-    }
-
-    // https://github.com/elastic/elasticsearch/issues/22355
-    public void testVerifyRangeQueryWithNullBounds() throws Exception {
-        addQueryMapping();
-        MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
-            mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                jsonBuilder().startObject()
-                    .field(fieldName, rangeQuery("date_field").from("now").to(null))
-                    .endObject().bytes(),
-                XContentType.JSON));
-
-            }
-        );
-        assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
-        e = expectThrows(MapperParsingException.class, () -> {
-            mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-                jsonBuilder().startObject()
-                    .field(fieldName, rangeQuery("date_field").from(null).to("now"))
-                    .endObject().bytes(),
-                XContentType.JSON));
-
-            }
-        );
-        assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
-
-        // No validation failures:
-        mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-            jsonBuilder().startObject()
-                .field(fieldName, rangeQuery("date_field").from("2016-01-01").to(null))
-                .endObject().bytes(),
-            XContentType.JSON));
-
-        mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
-            jsonBuilder().startObject()
-                .field(fieldName, rangeQuery("date_field").from(null).to("2016-01-01"))
-                .endObject().bytes(),
-            XContentType.JSON));
-
-    }
-
     public void testUnsupportedQueries() {
         RangeQueryBuilder rangeQuery1 = new RangeQueryBuilder("field").from("2016-01-01||/D").to("2017-01-01||/D");
         RangeQueryBuilder rangeQuery2 = new RangeQueryBuilder("field").from("2016-01-01||/D").to("now");
         PercolatorFieldMapper.verifyQuery(rangeQuery1);
-        expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(rangeQuery2));
-        PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(rangeQuery1));
-        expectThrows(IllegalArgumentException.class, () ->
-            PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(rangeQuery2)));
-        PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder((rangeQuery1)));
-        expectThrows(IllegalArgumentException.class, () ->
-            PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder(rangeQuery2)));
-        PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(rangeQuery1, new MatchAllQueryBuilder()));
-        expectThrows(IllegalArgumentException.class, () ->
-            PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(rangeQuery2, new MatchAllQueryBuilder())));
-        PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(rangeQuery1, new RandomScoreFunctionBuilder()));
-        expectThrows(IllegalArgumentException.class, () ->
-            PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(rangeQuery2, new RandomScoreFunctionBuilder())));
+        PercolatorFieldMapper.verifyQuery(rangeQuery2);
 
         HasChildQueryBuilder hasChildQuery = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None);
+        expectThrows(IllegalArgumentException.class, () ->
+            PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery)));
+        expectThrows(IllegalArgumentException.class, () ->
+            PercolatorFieldMapper.verifyQuery(new DisMaxQueryBuilder().add(hasChildQuery)));
+        PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder((rangeQuery1)));
+        expectThrows(IllegalArgumentException.class, () ->
+            PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder(hasChildQuery)));
+        PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(rangeQuery1, new MatchAllQueryBuilder()));
+        expectThrows(IllegalArgumentException.class, () ->
+            PercolatorFieldMapper.verifyQuery(new BoostingQueryBuilder(hasChildQuery, new MatchAllQueryBuilder())));
+        PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(rangeQuery1, new RandomScoreFunctionBuilder()));
+        expectThrows(IllegalArgumentException.class, () ->
+            PercolatorFieldMapper.verifyQuery(new FunctionScoreQueryBuilder(hasChildQuery, new RandomScoreFunctionBuilder())));
+
         expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasChildQuery));
         expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery)));
 