Search: deprecate the limit filter.
This is really a Collector rather than a filter. This commit deprecates the `limit` filter, makes it a no-op, and recommends using the `terminate_after` parameter, which we introduced in the meantime, instead.
This commit is contained in:
parent
bad6656f02
commit
5b3cc2f07c
|
@ -383,3 +383,6 @@ be used separately to control whether `routing_nodes` should be returned.
|
|||
=== Query DSL
|
||||
|
||||
The `fuzzy_like_this` and `fuzzy_like_this_field` queries have been removed.
|
||||
|
||||
The `limit` filter is deprecated and becomes a no-op. You can achieve similar
|
||||
behaviour using the <<search-request-body,terminate_after>> parameter.
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
[[query-dsl-limit-filter]]
|
||||
=== Limit Filter
|
||||
|
||||
deprecated[2.0.0, Use <<search-request-body,terminate_after>> instead]
|
||||
|
||||
A limit filter limits the number of documents (per shard) that a query executes on.
|
||||
For example:
|
||||
|
||||
|
|
|
@ -1,79 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.common.lucene.search;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.search.DocIdSet;
|
||||
import org.apache.lucene.search.DocValuesDocIdSet;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.apache.lucene.util.RamUsageEstimator;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
|
||||
/**
 * A filter that lets at most {@code limit} documents (per shard) match.
 * Matching is counted across all segments of the shard via the shared
 * {@link #counter} field.
 *
 * NOTE(review): {@code counter} is plain mutable state with no
 * synchronization — this assumes a single-threaded execution path per
 * filter instance, and a single use per instance (the counter is never
 * reset); confirm against the callers before reusing instances.
 */
public class LimitFilter extends NoCacheFilter {

    // Maximum number of documents (per shard) allowed to match.
    private final int limit;
    // Running count of documents seen so far, shared across segments.
    private int counter;

    public LimitFilter(int limit) {
        this.limit = limit;
    }

    public int getLimit() {
        return limit;
    }

    @Override
    public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
        // Once the running count has passed the limit, skip whole segments
        // by returning null (i.e. no documents from this reader).
        if (counter > limit) {
            return null;
        }
        return new LimitDocIdSet(context.reader().maxDoc(), acceptDocs, limit);
    }

    /**
     * Doc-id set that accepts documents until the enclosing filter's
     * shared counter exceeds {@code limit}.
     */
    public class LimitDocIdSet extends DocValuesDocIdSet {

        private final int limit;

        public LimitDocIdSet(int maxDoc, @Nullable Bits acceptDocs, int limit) {
            super(maxDoc, acceptDocs);
            this.limit = limit;
        }

        @Override
        protected boolean matchDoc(int doc) {
            // Increments the OUTER class's counter (this is a non-static
            // inner class), so the limit applies across segments.
            if (++counter > limit) {
                return false;
            }
            return true;
        }

        @Override
        public long ramBytesUsed() {
            // Accounts only for the inner int field; the shared counter
            // lives on the enclosing filter.
            return RamUsageEstimator.NUM_BYTES_INT;
        }
    }

    @Override
    public String toString(String field) {
        return "limit(limit=" + limit + ")";
    }
}
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.elasticsearch.action.search.SearchRequestBuilder;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
|
@ -39,7 +40,9 @@ public abstract class FilterBuilders {
|
|||
|
||||
    /**
     * A filter that limits the results to the provided limit value (per shard!).
     *
     * @param limit the maximum number of documents (per shard) to let through
     * @return a builder for the {@code limit} filter
     * @deprecated Use {@link SearchRequestBuilder#setTerminateAfter(int)} instead.
     */
    @Deprecated
    public static LimitFilterBuilder limitFilter(int limit) {
        return new LimitFilterBuilder(limit);
    }
|
||||
|
|
|
@ -19,10 +19,15 @@
|
|||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.elasticsearch.action.search.SearchRequestBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link SearchRequestBuilder#setTerminateAfter(int)} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public class LimitFilterBuilder extends BaseFilterBuilder {
|
||||
|
||||
private final int limit;
|
||||
|
|
|
@ -21,7 +21,7 @@ package org.elasticsearch.index.query;
|
|||
|
||||
import org.apache.lucene.search.Filter;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.LimitFilter;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -62,6 +62,7 @@ public class LimitFilterParser implements FilterParser {
|
|||
throw new QueryParsingException(parseContext.index(), "No value specified for limit filter");
|
||||
}
|
||||
|
||||
return new LimitFilter(limit);
|
||||
// this filter is deprecated and parses to a filter that matches everything
|
||||
return Queries.MATCH_ALL_FILTER;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -303,7 +303,7 @@ public class CountQueryTests extends ElasticsearchIntegrationTest {
|
|||
client().prepareIndex("test", "type1", "4").setSource("field3", "value3_4"));
|
||||
|
||||
CountResponse countResponse = client().prepareCount().setQuery(filteredQuery(matchAllQuery(), limitFilter(2))).get();
|
||||
assertHitCount(countResponse, 2l);
|
||||
assertHitCount(countResponse, 4l); // limit is a no-op
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@ -1280,20 +1280,6 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
|
|||
assertThat(getTerm(filteredQuery.getFilter()), equalTo(new Term("name.last", "banon")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLimitFilter() throws Exception {
|
||||
IndexQueryParserService queryParser = queryParser();
|
||||
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/limit-filter.json");
|
||||
Query parsedQuery = queryParser.parse(query).query();
|
||||
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
|
||||
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
|
||||
assertThat(filteredQuery.getFilter(), instanceOf(LimitFilter.class));
|
||||
assertThat(((LimitFilter) filteredQuery.getFilter()).getLimit(), equalTo(2));
|
||||
|
||||
assertThat(filteredQuery.getQuery(), instanceOf(TermQuery.class));
|
||||
assertThat(((TermQuery) filteredQuery.getQuery()).getTerm(), equalTo(new Term("name.first", "shay")));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTermFilterQuery() throws Exception {
|
||||
IndexQueryParserService queryParser = queryParser();
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
{
|
||||
"filtered":{
|
||||
"filter":{
|
||||
"limit":{
|
||||
"value":2
|
||||
}
|
||||
},
|
||||
"query":{
|
||||
"term":{
|
||||
"name.first":"shay"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -677,7 +677,7 @@ public class SearchQueryTests extends ElasticsearchIntegrationTest {
|
|||
client().prepareIndex("test", "type1", "3").setSource("field2", "value2_3"),
|
||||
client().prepareIndex("test", "type1", "4").setSource("field3", "value3_4"));
|
||||
|
||||
assertHitCount(client().prepareSearch().setQuery(filteredQuery(matchAllQuery(), limitFilter(2))).get(), 2l);
|
||||
assertHitCount(client().prepareSearch().setQuery(filteredQuery(matchAllQuery(), limitFilter(2))).get(), 4l); // no-op
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
Loading…
Reference in New Issue