Query DSL: Add limit filter, closes #976.
parent d299672545 · commit ae9857cb45
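For orientation before the diff: the new filter is addressed in the Query DSL by the name "limit" and takes a single "value" parameter, the maximum number of documents to match per shard. A minimal sketch of a request using it (the shape mirrors the limit-filter.json fixture added below; the match_all query here is only for illustration):

{
    "filtered" : {
        "query" : { "match_all" : {} },
        "filter" : { "limit" : { "value" : 2 } }
    }
}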
New file: org/elasticsearch/common/lucene/search/LimitFilter.java
@@ -0,0 +1,61 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.lucene.search;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.DocIdSet;
import org.elasticsearch.common.lucene.docset.GetDocSet;

import java.io.IOException;

public class LimitFilter extends NoCacheFilter {

    private final int limit;

    public LimitFilter(int limit) {
        this.limit = limit;
    }

    public int getLimit() {
        return limit;
    }

    @Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
        return new LimitDocSet(reader.maxDoc(), limit);
    }

    public static class LimitDocSet extends GetDocSet {

        private final int limit;
        private int counter;

        public LimitDocSet(int maxDoc, int limit) {
            super(maxDoc);
            this.limit = limit;
        }

        @Override public boolean get(int doc) throws IOException {
            if (++counter > limit) {
                return false;
            }
            return true;
        }
    }
}
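As an aside (not part of the commit): LimitDocSet never looks at the doc id, it simply counts invocations, so the first `limit` documents evaluated against a reader match and every later one is rejected. A minimal sketch of that behaviour, using only the classes added above:

import java.io.IOException;

import org.elasticsearch.common.lucene.search.LimitFilter;

public class LimitDocSetSketch {
    public static void main(String[] args) throws IOException {
        // maxDoc = 10, limit = 2: only the first two get(...) calls return true.
        LimitFilter.LimitDocSet docSet = new LimitFilter.LimitDocSet(10, 2);
        System.out.println(docSet.get(0)); // true  (1st document counted)
        System.out.println(docSet.get(5)); // true  (2nd document counted)
        System.out.println(docSet.get(7)); // false (limit of 2 reached)
    }
}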
New file: org/elasticsearch/common/lucene/search/NoCacheFilter.java
@@ -0,0 +1,29 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.lucene.search;

import org.apache.lucene.search.Filter;

/**
 * A marker interface for {@link org.apache.lucene.search.Filter} denoting the filter
 * as one that should not be cached, ever.
 */
public abstract class NoCacheFilter extends Filter {
}
AbstractConcurrentMapFilterCache.java
@@ -25,6 +25,7 @@ import org.apache.lucene.search.Filter;
 import org.elasticsearch.common.RamUsage;
 import org.elasticsearch.common.lab.LongsLAB;
 import org.elasticsearch.common.lucene.docset.DocSet;
+import org.elasticsearch.common.lucene.search.NoCacheFilter;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;

@@ -121,6 +122,9 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
     }

     @Override public Filter cache(Filter filterToCache) {
+        if (filterToCache instanceof NoCacheFilter) {
+            return filterToCache;
+        }
         if (isCached(filterToCache)) {
             return filterToCache;
         }
AbstractWeightedFilterCache.java
@@ -29,6 +29,7 @@ import org.elasticsearch.common.concurrentlinkedhashmap.EvictionListener;
 import org.elasticsearch.common.concurrentlinkedhashmap.Weigher;
 import org.elasticsearch.common.lab.LongsLAB;
 import org.elasticsearch.common.lucene.docset.DocSet;
+import org.elasticsearch.common.lucene.search.NoCacheFilter;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;

@@ -138,6 +139,9 @@ public abstract class AbstractWeightedFilterCache extends AbstractIndexComponent
     }

     @Override public Filter cache(Filter filterToCache) {
+        if (filterToCache instanceof NoCacheFilter) {
+            return filterToCache;
+        }
         if (isCached(filterToCache)) {
             return filterToCache;
         }
IndexQueryParserModule.java
@@ -255,6 +255,7 @@ public class IndexQueryParserModule extends AbstractModule {
         bindings.processXContentQueryFilter(HasChildFilterParser.NAME, HasChildFilterParser.class);
         bindings.processXContentQueryFilter(TypeFilterParser.NAME, TypeFilterParser.class);
         bindings.processXContentQueryFilter(IdsFilterParser.NAME, IdsFilterParser.class);
+        bindings.processXContentQueryFilter(LimitFilterParser.NAME, LimitFilterParser.class);
         bindings.processXContentQueryFilter(TermFilterParser.NAME, TermFilterParser.class);
         bindings.processXContentQueryFilter(TermsFilterParser.NAME, TermsFilterParser.class);
         bindings.processXContentQueryFilter(RangeFilterParser.NAME, RangeFilterParser.class);
FilterBuilders.java
@@ -35,6 +35,13 @@ public abstract class FilterBuilders {
         return new MatchAllFilterBuilder();
     }

+    /**
+     * A filter that limits the results to the provided limit value (per shard!).
+     */
+    public static LimitFilterBuilder limitFilter(int limit) {
+        return new LimitFilterBuilder(limit);
+    }
+
     /**
      * Creates a new ids filter with the provided doc/mapping types.
      *
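A usage sketch for the new builder (assuming static imports of filteredQuery, matchAllQuery and limitFilter, exactly as the integration test below uses them): wrapping any query in a filtered query with a limit filter caps the number of documents matched on each shard.

// Match everything, but let at most 2 documents per shard through the filter.
SearchResponse response = client.prepareSearch("test")
        .setQuery(filteredQuery(matchAllQuery(), limitFilter(2)))
        .execute().actionGet();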
New file: org/elasticsearch/index/query/xcontent/LimitFilterBuilder.java
@@ -0,0 +1,39 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query.xcontent;

import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;

public class LimitFilterBuilder extends BaseFilterBuilder {

    private final int limit;

    public LimitFilterBuilder(int limit) {
        this.limit = limit;
    }

    @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(LimitFilterParser.NAME);
        builder.field("value", limit);
        builder.endObject();
    }
}
New file: org/elasticsearch/index/query/xcontent/LimitFilterParser.java
@@ -0,0 +1,68 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query.xcontent;

import org.apache.lucene.search.Filter;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.LimitFilter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.settings.IndexSettings;

import java.io.IOException;

public class LimitFilterParser extends AbstractIndexComponent implements XContentFilterParser {

    public static final String NAME = "limit";

    @Inject public LimitFilterParser(Index index, @IndexSettings Settings settings) {
        super(index, settings);
    }

    @Override public String[] names() {
        return new String[]{NAME};
    }

    @Override public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        int limit = -1;
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                if ("value".equals(currentFieldName)) {
                    limit = parser.intValue();
                }
            }
        }

        if (limit == -1) {
            throw new QueryParsingException(index, "No value specified for limit filter");
        }

        return new LimitFilter(limit);
    }
}
SimpleIndexQueryParserTests.java
@@ -21,11 +21,23 @@ package org.elasticsearch.index.query.xcontent;

 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.*;
-import org.apache.lucene.search.spans.*;
+import org.apache.lucene.search.spans.SpanFirstQuery;
+import org.apache.lucene.search.spans.SpanNearQuery;
+import org.apache.lucene.search.spans.SpanNotQuery;
+import org.apache.lucene.search.spans.SpanOrQuery;
+import org.apache.lucene.search.spans.SpanTermQuery;
 import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.common.inject.Injector;
 import org.elasticsearch.common.inject.ModulesBuilder;
-import org.elasticsearch.common.lucene.search.*;
+import org.elasticsearch.common.lucene.search.AndFilter;
+import org.elasticsearch.common.lucene.search.LimitFilter;
+import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
+import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
+import org.elasticsearch.common.lucene.search.NotFilter;
+import org.elasticsearch.common.lucene.search.OrFilter;
+import org.elasticsearch.common.lucene.search.Queries;
+import org.elasticsearch.common.lucene.search.TermFilter;
+import org.elasticsearch.common.lucene.search.XBooleanFilter;
 import org.elasticsearch.common.lucene.search.function.BoostScoreFunction;
 import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
 import org.elasticsearch.common.settings.ImmutableSettings;

@@ -956,6 +968,19 @@ public class SimpleIndexQueryParserTests {
         assertThat(((TermFilter) filteredQuery.getFilter()).getTerm(), equalTo(new Term("name.last", "banon")));
     }

+    @Test public void testLimitFilter() throws Exception {
+        IndexQueryParser queryParser = queryParser();
+        String query = copyToStringFromClasspath("/org/elasticsearch/index/query/xcontent/limit-filter.json");
+        Query parsedQuery = queryParser.parse(query).query();
+        assertThat(parsedQuery, instanceOf(FilteredQuery.class));
+        FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
+        assertThat(filteredQuery.getFilter(), instanceOf(LimitFilter.class));
+        assertThat(((LimitFilter) filteredQuery.getFilter()).getLimit(), equalTo(2));
+
+        assertThat(filteredQuery.getQuery(), instanceOf(TermQuery.class));
+        assertThat(((TermQuery) filteredQuery.getQuery()).getTerm(), equalTo(new Term("name.first", "shay")));
+    }
+
     @Test public void testTermFilterQuery() throws Exception {
         IndexQueryParser queryParser = queryParser();
         String query = copyToStringFromClasspath("/org/elasticsearch/index/query/xcontent/term-filter.json");
New file: org/elasticsearch/index/query/xcontent/limit-filter.json (test resource)
@@ -0,0 +1,14 @@
{
    "filtered" : {
        "filter" : {
            "limit" : {
                "value" : 2
            }
        },
        "query" : {
            "term" : {
                "name.first" : "shay"
            }
        }
    }
}
SimpleQueryTests.java
@@ -195,6 +195,26 @@ public class SimpleQueryTests extends AbstractNodesTests {
         assertThat(searchResponse.hits().totalHits(), equalTo(0l));
     }

+    @Test public void testLimitFilter() throws Exception {
+        try {
+            client.admin().indices().prepareDelete("test").execute().actionGet();
+        } catch (Exception e) {
+            // ignore
+        }
+
+        client.admin().indices().prepareCreate("test").setSettings(ImmutableSettings.settingsBuilder().put("number_of_shards", 1)).execute().actionGet();
+
+        client.prepareIndex("test", "type1", "1").setSource("field1", "value1_1").execute().actionGet();
+        client.prepareIndex("test", "type1", "2").setSource("field1", "value1_2").execute().actionGet();
+        client.prepareIndex("test", "type1", "3").setSource("field2", "value2_3").execute().actionGet();
+        client.prepareIndex("test", "type1", "4").setSource("field3", "value3_4").execute().actionGet();
+
+        client.admin().indices().prepareRefresh().execute().actionGet();
+
+        SearchResponse searchResponse = client.prepareSearch().setQuery(filteredQuery(matchAllQuery(), limitFilter(2))).execute().actionGet();
+        assertThat(searchResponse.hits().totalHits(), equalTo(2l));
+    }
+
     @Test public void filterExistsMissingTests() throws Exception {
         try {
             client.admin().indices().prepareDelete("test").execute().actionGet();
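Note that the test pins number_of_shards to 1, which is why the limit of 2 yields exactly 2 hits; because the filter counts per shard, the same request against an index with N shards could return up to 2 × N documents.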